| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 723 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 734 } | 734 } |
| 735 | 735 |
| 736 | 736 |
| 737 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 737 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 738 // We don't allow a GC during a store buffer overflow so there is no need to | 738 // We don't allow a GC during a store buffer overflow so there is no need to |
| 739 // store the registers in any particular way, but we do have to store and | 739 // store the registers in any particular way, but we do have to store and |
| 740 // restore them. | 740 // restore them. |
| 741 __ mflr(r0); | 741 __ mflr(r0); |
| 742 __ MultiPush(kJSCallerSaved | r0.bit()); | 742 __ MultiPush(kJSCallerSaved | r0.bit()); |
| 743 if (save_doubles()) { | 743 if (save_doubles()) { |
| 744 __ SaveFPRegs(sp, 0, DoubleRegister::kNumVolatileRegisters); | 744 __ MultiPushDoubles(kCallerSavedDoubles); |
| 745 } | 745 } |
| 746 const int argument_count = 1; | 746 const int argument_count = 1; |
| 747 const int fp_argument_count = 0; | 747 const int fp_argument_count = 0; |
| 748 const Register scratch = r4; | 748 const Register scratch = r4; |
| 749 | 749 |
| 750 AllowExternalCallThatCantCauseGC scope(masm); | 750 AllowExternalCallThatCantCauseGC scope(masm); |
| 751 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); | 751 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); |
| 752 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); | 752 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); |
| 753 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), | 753 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), |
| 754 argument_count); | 754 argument_count); |
| 755 if (save_doubles()) { | 755 if (save_doubles()) { |
| 756 __ RestoreFPRegs(sp, 0, DoubleRegister::kNumVolatileRegisters); | 756 __ MultiPopDoubles(kCallerSavedDoubles); |
| 757 } | 757 } |
| 758 __ MultiPop(kJSCallerSaved | r0.bit()); | 758 __ MultiPop(kJSCallerSaved | r0.bit()); |
| 759 __ mtlr(r0); | 759 __ mtlr(r0); |
| 760 __ Ret(); | 760 __ Ret(); |
| 761 } | 761 } |
| 762 | 762 |
| 763 | 763 |
| 764 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { | 764 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { |
| 765 __ PushSafepointRegisters(); | 765 __ PushSafepointRegisters(); |
| 766 __ blr(); | 766 __ blr(); |
| (...skipping 452 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1219 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 1219 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
| 1220 | 1220 |
| 1221 // PPC LINUX ABI: | 1221 // PPC LINUX ABI: |
| 1222 // preserve LR in pre-reserved slot in caller's frame | 1222 // preserve LR in pre-reserved slot in caller's frame |
| 1223 __ mflr(r0); | 1223 __ mflr(r0); |
| 1224 __ StoreP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); | 1224 __ StoreP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); |
| 1225 | 1225 |
| 1226 // Save callee saved registers on the stack. | 1226 // Save callee saved registers on the stack. |
| 1227 __ MultiPush(kCalleeSaved); | 1227 __ MultiPush(kCalleeSaved); |
| 1228 | 1228 |
| 1229 // Floating point regs FPR0 - FPR13 are volatile | 1229 // Save callee-saved double registers. |
| 1230 // FPR14-FPR31 are non-volatile, but sub-calls will save them for us | 1230 __ MultiPushDoubles(kCalleeSavedDoubles); |
| 1231 | 1231 // Set up the reserved register for 0.0. |
| 1232 // int offset_to_argv = kPointerSize * 22; // matches (22*4) above | 1232 __ LoadDoubleLiteral(kDoubleRegZero, 0.0, r0); |
| 1233 // __ lwz(r7, MemOperand(sp, offset_to_argv)); | |
| 1234 | 1233 |
| 1235 // Push a frame with special values setup to mark it as an entry frame. | 1234 // Push a frame with special values setup to mark it as an entry frame. |
| 1236 // r3: code entry | 1235 // r3: code entry |
| 1237 // r4: function | 1236 // r4: function |
| 1238 // r5: receiver | 1237 // r5: receiver |
| 1239 // r6: argc | 1238 // r6: argc |
| 1240 // r7: argv | 1239 // r7: argv |
| 1241 __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. | 1240 __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. |
| 1242 __ push(r0); | 1241 __ push(r0); |
| 1243 if (FLAG_enable_embedded_constant_pool) { | 1242 if (FLAG_enable_embedded_constant_pool) { |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1345 __ bind(&non_outermost_js_2); | 1344 __ bind(&non_outermost_js_2); |
| 1346 | 1345 |
| 1347 // Restore the top frame descriptors from the stack. | 1346 // Restore the top frame descriptors from the stack. |
| 1348 __ pop(r6); | 1347 __ pop(r6); |
| 1349 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 1348 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
| 1350 __ StoreP(r6, MemOperand(ip)); | 1349 __ StoreP(r6, MemOperand(ip)); |
| 1351 | 1350 |
| 1352 // Reset the stack to the callee saved registers. | 1351 // Reset the stack to the callee saved registers. |
| 1353 __ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); | 1352 __ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 1354 | 1353 |
| 1355 // Restore callee-saved registers and return. | 1354 // Restore callee-saved double registers. |
| 1356 #ifdef DEBUG | 1355 __ MultiPopDoubles(kCalleeSavedDoubles); |
| 1357 if (FLAG_debug_code) { | |
| 1358 Label here; | |
| 1359 __ b(&here, SetLK); | |
| 1360 __ bind(&here); | |
| 1361 } | |
| 1362 #endif | |
| 1363 | 1356 |
| 1357 // Restore callee-saved registers. |
| 1364 __ MultiPop(kCalleeSaved); | 1358 __ MultiPop(kCalleeSaved); |
| 1365 | 1359 |
| 1360 // Return |
| 1366 __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); | 1361 __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); |
| 1367 __ mtctr(r0); | 1362 __ mtlr(r0); |
| 1368 __ bctr(); | 1363 __ blr(); |
| 1369 } | 1364 } |
| 1370 | 1365 |
| 1371 | 1366 |
| 1372 // Uses registers r3 to r7. | 1367 // Uses registers r3 to r7. |
| 1373 // Expected input (depending on whether args are in registers or on the stack): | 1368 // Expected input (depending on whether args are in registers or on the stack): |
| 1374 // * object: r3 or at sp + 1 * kPointerSize. | 1369 // * object: r3 or at sp + 1 * kPointerSize. |
| 1375 // * function: r4 or at sp. | 1370 // * function: r4 or at sp. |
| 1376 // | 1371 // |
| 1377 // An inlined call site may have been generated before calling this stub. | 1372 // An inlined call site may have been generated before calling this stub. |
| 1378 // In this case the offset to the inline site to patch is passed in r8. | 1373 // In this case the offset to the inline site to patch is passed in r8. |
| (...skipping 4449 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5828 kStackUnwindSpace, NULL, | 5823 kStackUnwindSpace, NULL, |
| 5829 MemOperand(fp, 6 * kPointerSize), NULL); | 5824 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5830 } | 5825 } |
| 5831 | 5826 |
| 5832 | 5827 |
| 5833 #undef __ | 5828 #undef __ |
| 5834 } // namespace internal | 5829 } // namespace internal |
| 5835 } // namespace v8 | 5830 } // namespace v8 |
| 5836 | 5831 |
| 5837 #endif // V8_TARGET_ARCH_PPC | 5832 #endif // V8_TARGET_ARCH_PPC |
| OLD | NEW |