OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 631 matching lines...)
642 __ Subu(result_reg, zero_reg, input_high); | 642 __ Subu(result_reg, zero_reg, input_high); |
643 __ Movz(result_reg, input_high, scratch); | 643 __ Movz(result_reg, input_high, scratch); |
644 | 644 |
645 __ bind(&done); | 645 __ bind(&done); |
646 | 646 |
647 __ Pop(scratch, scratch2, scratch3); | 647 __ Pop(scratch, scratch2, scratch3); |
648 __ Ret(); | 648 __ Ret(); |
649 } | 649 } |
650 | 650 |
651 | 651 |
652 bool WriteInt32ToHeapNumberStub::IsPregenerated() { | 652 bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) { |
653 // These variants are compiled ahead of time. See next method. | 653 // These variants are compiled ahead of time. See next method. |
654 if (the_int_.is(a1) && | 654 if (the_int_.is(a1) && |
655 the_heap_number_.is(v0) && | 655 the_heap_number_.is(v0) && |
656 scratch_.is(a2) && | 656 scratch_.is(a2) && |
657 sign_.is(a3)) { | 657 sign_.is(a3)) { |
658 return true; | 658 return true; |
659 } | 659 } |
660 if (the_int_.is(a2) && | 660 if (the_int_.is(a2) && |
661 the_heap_number_.is(v0) && | 661 the_heap_number_.is(v0) && |
662 scratch_.is(a3) && | 662 scratch_.is(a3) && |
(...skipping 2053 matching lines...)
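The two register combinations accepted above are exactly the variants that GenerateFixedRegStubsAheadOfTime bakes in at startup. A minimal sketch of that companion method, assuming GetCode(Isolate*) and Code::set_is_pregenerated from this vintage of the stub API; the t0 sign register in the second variant is inferred, not visible in the hunk:

    void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
        Isolate* isolate) {
      // Pregenerate the two fixed-register variants IsPregenerated accepts.
      WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
      WriteInt32ToHeapNumberStub stub2(a2, v0, a3, t0);  // sign reg assumed
      stub1.GetCode(isolate)->set_is_pregenerated(true);
      stub2.GetCode(isolate)->set_is_pregenerated(true);
    }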
2716 __ Ret(); | 2716 __ Ret(); |
2717 } | 2717 } |
2718 } | 2718 } |
2719 | 2719 |
2720 | 2720 |
2721 bool CEntryStub::NeedsImmovableCode() { | 2721 bool CEntryStub::NeedsImmovableCode() { |
2722 return true; | 2722 return true; |
2723 } | 2723 } |
2724 | 2724 |
2725 | 2725 |
2726 bool CEntryStub::IsPregenerated() { | 2726 bool CEntryStub::IsPregenerated(Isolate* isolate) { |
2727 return (!save_doubles_ || Isolate::Current()->fp_stubs_generated()) && | 2727 return (!save_doubles_ || isolate->fp_stubs_generated()) && |
2728 result_size_ == 1; | 2728 result_size_ == 1; |
2729 } | 2729 } |
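This hunk is the heart of the change: the isolate is now threaded in by the caller instead of being fetched through the Isolate::Current() thread-local lookup. A hypothetical call site under the new signature (EnsureStubForIsolate is illustrative, not part of the patch):

    void EnsureStubForIsolate(Isolate* isolate, CEntryStub* stub) {
      // No TLS access: the caller passes the isolate it already holds.
      if (!stub->IsPregenerated(isolate)) {
        stub->GetCode(isolate);  // compile lazily for this isolate
      }
    }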
2730 | 2730 |
2731 | 2731 |
2732 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2732 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2733 CEntryStub::GenerateAheadOfTime(isolate); | 2733 CEntryStub::GenerateAheadOfTime(isolate); |
2734 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2734 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2735 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2735 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2736 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2736 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2737 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2737 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
(...skipping 3842 matching lines...)
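Each Generate*AheadOfTime call above forces compilation of a stub's canonical configurations during isolate setup, so IsPregenerated can later answer true without compiling anything. A sketch of the per-stub pattern, with SomeStub as a stand-in name and set_is_pregenerated assumed from this era of the API:

    void SomeStub::GenerateAheadOfTime(Isolate* isolate) {
      SomeStub stub;  // the canonical configuration
      // Compile now and mark the code object as pregenerated.
      stub.GetCode(isolate)->set_is_pregenerated(true);
    }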
6580 // StringAddStub::Generate | 6580 // StringAddStub::Generate |
6581 { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET }, | 6581 { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET }, |
6582 { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET }, | 6582 { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET }, |
6583 // Null termination. | 6583 // Null termination. |
6584 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET} | 6584 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET} |
6585 }; | 6585 }; |
6586 | 6586 |
6587 #undef REG | 6587 #undef REG |
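The kAheadOfTime table above is a sentinel-terminated list: the no_reg entry lets IsPregenerated below walk it without carrying a length. The element layout, inferred from the field accesses in that loop:

    struct AheadOfTimeWriteBarrierStubList {
      Register object;   // matched against object_
      Register value;    // matched against value_
      Register address;  // matched against address_
      RememberedSetAction action;
    };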
6588 | 6588 |
6589 | 6589 |
6590 bool RecordWriteStub::IsPregenerated() { | 6590 bool RecordWriteStub::IsPregenerated(Isolate* isolate) { |
6591 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | 6591 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
6592 !entry->object.is(no_reg); | 6592 !entry->object.is(no_reg); |
6593 entry++) { | 6593 entry++) { |
6594 if (object_.is(entry->object) && | 6594 if (object_.is(entry->object) && |
6595 value_.is(entry->value) && | 6595 value_.is(entry->value) && |
6596 address_.is(entry->address) && | 6596 address_.is(entry->address) && |
6597 remembered_set_action_ == entry->action && | 6597 remembered_set_action_ == entry->action && |
6598 save_fp_regs_mode_ == kDontSaveFPRegs) { | 6598 save_fp_regs_mode_ == kDontSaveFPRegs) { |
6599 return true; | 6599 return true; |
6600 } | 6600 } |
(...skipping 654 matching lines...)
7255 __ bind(&fast_elements_case); | 7255 __ bind(&fast_elements_case); |
7256 GenerateCase(masm, FAST_ELEMENTS); | 7256 GenerateCase(masm, FAST_ELEMENTS); |
7257 } | 7257 } |
7258 | 7258 |
7259 | 7259 |
7260 #undef __ | 7260 #undef __ |
7261 | 7261 |
7262 } } // namespace v8::internal | 7262 } } // namespace v8::internal |
7263 | 7263 |
7264 #endif // V8_TARGET_ARCH_MIPS | 7264 #endif // V8_TARGET_ARCH_MIPS |