| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 696 matching lines...) | |
| 707 __ Subu(result_reg, zero_reg, input_high); | 707 __ Subu(result_reg, zero_reg, input_high); |
| 708 __ Movz(result_reg, input_high, scratch); | 708 __ Movz(result_reg, input_high, scratch); |
| 709 | 709 |
| 710 __ bind(&done); | 710 __ bind(&done); |
| 711 | 711 |
| 712 __ Pop(scratch, scratch2, scratch3); | 712 __ Pop(scratch, scratch2, scratch3); |
| 713 __ Ret(); | 713 __ Ret(); |
| 714 } | 714 } |
| 715 | 715 |
| 716 | 716 |
| 717 bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) { | |
| 718 // These variants are compiled ahead of time. See next method. | |
| 719 if (the_int_.is(a1) && | |
| 720 the_heap_number_.is(v0) && | |
| 721 scratch_.is(a2) && | |
| 722 sign_.is(a3)) { | |
| 723 return true; | |
| 724 } | |
| 725 if (the_int_.is(a2) && | |
| 726 the_heap_number_.is(v0) && | |
| 727 scratch_.is(a3) && | |
| 728 sign_.is(a0)) { | |
| 729 return true; | |
| 730 } | |
| 731 // Other register combinations are generated as and when they are needed, | |
| 732 // so it is unsafe to call them from stubs (we can't generate a stub while | |
| 733 // we are generating a stub). | |
| 734 return false; | |
| 735 } | |
| 736 | |
| 737 | |
| 738 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( | 717 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( |
| 739 Isolate* isolate) { | 718 Isolate* isolate) { |
| 740 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); | 719 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); |
| 741 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); | 720 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); |
| 742 stub1.GetCode(isolate)->set_is_pregenerated(true); | 721 stub1.GetCode(isolate)->set_is_pregenerated(true); |
| 743 stub2.GetCode(isolate)->set_is_pregenerated(true); | 722 stub2.GetCode(isolate)->set_is_pregenerated(true); |
| 744 } | 723 } |
| 745 | 724 |
| 746 | 725 |
| 747 // See comment for class, this does NOT work for int32's that are in Smi range. | 726 // See comment for class, this does NOT work for int32's that are in Smi range. |
| (...skipping 994 matching lines...) | |
| 1742 __ Ret(); | 1721 __ Ret(); |
| 1743 } | 1722 } |
| 1744 } | 1723 } |
| 1745 | 1724 |
| 1746 | 1725 |
| 1747 bool CEntryStub::NeedsImmovableCode() { | 1726 bool CEntryStub::NeedsImmovableCode() { |
| 1748 return true; | 1727 return true; |
| 1749 } | 1728 } |
| 1750 | 1729 |
| 1751 | 1730 |
| 1752 bool CEntryStub::IsPregenerated(Isolate* isolate) { | |
| 1753 return (!save_doubles_ || isolate->fp_stubs_generated()) && | |
| 1754 result_size_ == 1; | |
| 1755 } | |
| 1756 | |
| 1757 | |
| 1758 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1731 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 1759 CEntryStub::GenerateAheadOfTime(isolate); | 1732 CEntryStub::GenerateAheadOfTime(isolate); |
| 1760 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1733 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 1761 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1734 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 1762 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1735 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 1763 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | |
| 1764 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 1736 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 1765 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 1737 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 1766 BinaryOpStub::GenerateAheadOfTime(isolate); | 1738 BinaryOpStub::GenerateAheadOfTime(isolate); |
| 1767 } | 1739 } |
| 1768 | 1740 |
| 1769 | 1741 |
| 1770 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 1742 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
| 1771 SaveFPRegsMode mode = kSaveFPRegs; | 1743 SaveFPRegsMode mode = kSaveFPRegs; |
| 1772 CEntryStub save_doubles(1, mode); | 1744 CEntryStub save_doubles(1, mode); |
| 1773 StoreBufferOverflowStub stub(mode); | 1745 StoreBufferOverflowStub stub(mode); |
| (...skipping 3725 matching lines...) | |
| 5499 __ bind(&in_dictionary); | 5471 __ bind(&in_dictionary); |
| 5500 __ Ret(USE_DELAY_SLOT); | 5472 __ Ret(USE_DELAY_SLOT); |
| 5501 __ li(result, 1); | 5473 __ li(result, 1); |
| 5502 | 5474 |
| 5503 __ bind(&not_in_dictionary); | 5475 __ bind(&not_in_dictionary); |
| 5504 __ Ret(USE_DELAY_SLOT); | 5476 __ Ret(USE_DELAY_SLOT); |
| 5505 __ mov(result, zero_reg); | 5477 __ mov(result, zero_reg); |
| 5506 } | 5478 } |
| 5507 | 5479 |
| 5508 | 5480 |
| 5509 struct AheadOfTimeWriteBarrierStubList { | |
| 5510 Register object, value, address; | |
| 5511 RememberedSetAction action; | |
| 5512 }; | |
| 5513 | |
| 5514 | |
| 5515 #define REG(Name) { kRegister_ ## Name ## _Code } | |
| 5516 | |
| 5517 static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { | |
| 5518 // Used in RegExpExecStub. | |
| 5519 { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET }, | |
| 5520 // Used in CompileArrayPushCall. | |
| 5521 // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore. | |
| 5522 // Also used in KeyedStoreIC::GenerateGeneric. | |
| 5523 { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET }, | |
| 5524 // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField. | |
| 5525 { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET }, | |
| 5526 { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET }, | |
| 5527 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. | |
| 5528 { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET }, | |
| 5529 { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET }, | |
| 5530 // KeyedStoreStubCompiler::GenerateStoreFastElement. | |
| 5531 { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET }, | |
| 5532 { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET }, | |
| 5533 // ElementsTransitionGenerator::GenerateMapChangeElementTransition | |
| 5534 // and ElementsTransitionGenerator::GenerateSmiToDouble | |
| 5535 // and ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5536 { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET }, | |
| 5537 { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET }, | |
| 5538 // ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5539 { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET }, | |
| 5540 { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET }, | |
| 5541 // StoreArrayLiteralElementStub::Generate | |
| 5542 { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET }, | |
| 5543 // FastNewClosureStub::Generate | |
| 5544 { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET }, | |
| 5545 // StringAddStub::Generate | |
| 5546 { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET }, | |
| 5547 { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET }, | |
| 5548 // Null termination. | |
| 5549 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET} | |
| 5550 }; | |
| 5551 | |
| 5552 #undef REG | |
| 5553 | |
| 5554 | |
| 5555 bool RecordWriteStub::IsPregenerated(Isolate* isolate) { | |
| 5556 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5557 !entry->object.is(no_reg); | |
| 5558 entry++) { | |
| 5559 if (object_.is(entry->object) && | |
| 5560 value_.is(entry->value) && | |
| 5561 address_.is(entry->address) && | |
| 5562 remembered_set_action_ == entry->action && | |
| 5563 save_fp_regs_mode_ == kDontSaveFPRegs) { | |
| 5564 return true; | |
| 5565 } | |
| 5566 } | |
| 5567 return false; | |
| 5568 } | |
| 5569 | |
| 5570 | |
| 5571 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 5481 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 5572 Isolate* isolate) { | 5482 Isolate* isolate) { |
| 5573 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 5483 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
| 5574 stub1.GetCode(isolate)->set_is_pregenerated(true); | 5484 stub1.GetCode(isolate)->set_is_pregenerated(true); |
| 5575 // Hydrogen code stubs need stub2 at snapshot time. | 5485 // Hydrogen code stubs need stub2 at snapshot time. |
| 5576 StoreBufferOverflowStub stub2(kSaveFPRegs); | 5486 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 5577 stub2.GetCode(isolate)->set_is_pregenerated(true); | 5487 stub2.GetCode(isolate)->set_is_pregenerated(true); |
| 5578 } | 5488 } |
| 5579 | 5489 |
| 5580 | 5490 |
| 5581 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) { | |
| 5582 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5583 !entry->object.is(no_reg); | |
| 5584 entry++) { | |
| 5585 RecordWriteStub stub(entry->object, | |
| 5586 entry->value, | |
| 5587 entry->address, | |
| 5588 entry->action, | |
| 5589 kDontSaveFPRegs); | |
| 5590 stub.GetCode(isolate)->set_is_pregenerated(true); | |
| 5591 } | |
| 5592 } | |
| 5593 | |
| 5594 | |
| 5595 bool CodeStub::CanUseFPRegisters() { | 5491 bool CodeStub::CanUseFPRegisters() { |
| 5596 return true; // FPU is a base requirement for V8. | 5492 return true; // FPU is a base requirement for V8. |
| 5597 } | 5493 } |
| 5598 | 5494 |
| 5599 | 5495 |
| 5600 // Takes the input in 3 registers: address_ value_ and object_. A pointer to | 5496 // Takes the input in 3 registers: address_ value_ and object_. A pointer to |
| 5601 // the value has just been written into the object, now this stub makes sure | 5497 // the value has just been written into the object, now this stub makes sure |
| 5602 // we keep the GC informed. The word in the object where the value has been | 5498 // we keep the GC informed. The word in the object where the value has been |
| 5603 // written is in the address register. | 5499 // written is in the address register. |
| 5604 void RecordWriteStub::Generate(MacroAssembler* masm) { | 5500 void RecordWriteStub::Generate(MacroAssembler* masm) { |
| (...skipping 286 matching lines...) | |
| 5891 __ Subu(a0, a0, 1); | 5787 __ Subu(a0, a0, 1); |
| 5892 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5788 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 5893 ParameterCount argument_count(a0); | 5789 ParameterCount argument_count(a0); |
| 5894 __ InvokeFunction( | 5790 __ InvokeFunction( |
| 5895 a1, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | 5791 a1, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| 5896 } | 5792 } |
| 5897 | 5793 |
| 5898 | 5794 |
| 5899 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 5795 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 5900 if (masm->isolate()->function_entry_hook() != NULL) { | 5796 if (masm->isolate()->function_entry_hook() != NULL) { |
| 5901 AllowStubCallsScope allow_stub_calls(masm, true); | |
| 5902 ProfileEntryHookStub stub; | 5797 ProfileEntryHookStub stub; |
| 5903 __ push(ra); | 5798 __ push(ra); |
| 5904 __ CallStub(&stub); | 5799 __ CallStub(&stub); |
| 5905 __ pop(ra); | 5800 __ pop(ra); |
| 5906 } | 5801 } |
| 5907 } | 5802 } |
| 5908 | 5803 |
| 5909 | 5804 |
| 5910 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 5805 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
| 5911 // The entry hook is a "push ra" instruction, followed by a call. | 5806 // The entry hook is a "push ra" instruction, followed by a call. |
| (...skipping 374 matching lines...) | |
| 6286 __ bind(&fast_elements_case); | 6181 __ bind(&fast_elements_case); |
| 6287 GenerateCase(masm, FAST_ELEMENTS); | 6182 GenerateCase(masm, FAST_ELEMENTS); |
| 6288 } | 6183 } |
| 6289 | 6184 |
| 6290 | 6185 |
| 6291 #undef __ | 6186 #undef __ |
| 6292 | 6187 |
| 6293 } } // namespace v8::internal | 6188 } } // namespace v8::internal |
| 6294 | 6189 |
| 6295 #endif // V8_TARGET_ARCH_MIPS | 6190 #endif // V8_TARGET_ARCH_MIPS |