| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 700 matching lines...) | (...skipping 700 matching lines...) |
| 711 | 711 |
| 712 __ Pop(scratch, scratch2, scratch3); | 712 __ Pop(scratch, scratch2, scratch3); |
| 713 __ Ret(); | 713 __ Ret(); |
| 714 } | 714 } |
| 715 | 715 |
| 716 | 716 |
| 717 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( | 717 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime( |
| 718 Isolate* isolate) { | 718 Isolate* isolate) { |
| 719 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); | 719 WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3); |
| 720 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); | 720 WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0); |
| 721 stub1.GetCode(isolate)->set_is_pregenerated(true); | 721 stub1.GetCode(isolate); |
| 722 stub2.GetCode(isolate)->set_is_pregenerated(true); | 722 stub2.GetCode(isolate); |
| 723 } | 723 } |
| 724 | 724 |
| 725 | 725 |
| 726 // See comment for class, this does NOT work for int32's that are in Smi range. | 726 // See comment for class, this does NOT work for int32's that are in Smi range. |
| 727 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { | 727 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { |
| 728 Label max_negative_int; | 728 Label max_negative_int; |
| 729 // the_int_ has the answer which is a signed int32 but not a Smi. | 729 // the_int_ has the answer which is a signed int32 but not a Smi. |
| 730 // We test for the special value that has a different exponent. | 730 // We test for the special value that has a different exponent. |
| 731 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); | 731 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); |
| 732 // Test sign, and save for later conditionals. | 732 // Test sign, and save for later conditionals. |
| (...skipping 1014 matching lines...) | (...skipping 1014 matching lines...) |
| 1747 // regenerate, which would lead to code stub initialization state being messed | 1747 // regenerate, which would lead to code stub initialization state being messed |
| 1748 // up. | 1748 // up. |
| 1749 Code* save_doubles_code; | 1749 Code* save_doubles_code; |
| 1750 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { | 1750 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { |
| 1751 save_doubles_code = *save_doubles.GetCode(isolate); | 1751 save_doubles_code = *save_doubles.GetCode(isolate); |
| 1752 } | 1752 } |
| 1753 Code* store_buffer_overflow_code; | 1753 Code* store_buffer_overflow_code; |
| 1754 if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) { | 1754 if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) { |
| 1755 store_buffer_overflow_code = *stub.GetCode(isolate); | 1755 store_buffer_overflow_code = *stub.GetCode(isolate); |
| 1756 } | 1756 } |
| 1757 save_doubles_code->set_is_pregenerated(true); | |
| 1758 store_buffer_overflow_code->set_is_pregenerated(true); | |
| 1759 isolate->set_fp_stubs_generated(true); | 1757 isolate->set_fp_stubs_generated(true); |
| 1760 } | 1758 } |
| 1761 | 1759 |
| 1762 | 1760 |
| 1763 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 1761 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
| 1764 CEntryStub stub(1, kDontSaveFPRegs); | 1762 CEntryStub stub(1, kDontSaveFPRegs); |
| 1765 Handle<Code> code = stub.GetCode(isolate); | 1763 stub.GetCode(isolate); |
| 1766 code->set_is_pregenerated(true); | |
| 1767 } | 1764 } |
| 1768 | 1765 |
| 1769 | 1766 |
| 1770 static void JumpIfOOM(MacroAssembler* masm, | 1767 static void JumpIfOOM(MacroAssembler* masm, |
| 1771 Register value, | 1768 Register value, |
| 1772 Register scratch, | 1769 Register scratch, |
| 1773 Label* oom_label) { | 1770 Label* oom_label) { |
| 1774 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); | 1771 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); |
| 1775 STATIC_ASSERT(kFailureTag == 3); | 1772 STATIC_ASSERT(kFailureTag == 3); |
| 1776 __ andi(scratch, value, 0xf); | 1773 __ andi(scratch, value, 0xf); |
| (...skipping 3697 matching lines...) | (...skipping 3697 matching lines...) |
| 5474 | 5471 |
| 5475 __ bind(¬_in_dictionary); | 5472 __ bind(¬_in_dictionary); |
| 5476 __ Ret(USE_DELAY_SLOT); | 5473 __ Ret(USE_DELAY_SLOT); |
| 5477 __ mov(result, zero_reg); | 5474 __ mov(result, zero_reg); |
| 5478 } | 5475 } |
| 5479 | 5476 |
| 5480 | 5477 |
| 5481 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 5478 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 5482 Isolate* isolate) { | 5479 Isolate* isolate) { |
| 5483 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 5480 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
| 5484 stub1.GetCode(isolate)->set_is_pregenerated(true); | 5481 stub1.GetCode(isolate); |
| 5485 // Hydrogen code stubs need stub2 at snapshot time. | 5482 // Hydrogen code stubs need stub2 at snapshot time. |
| 5486 StoreBufferOverflowStub stub2(kSaveFPRegs); | 5483 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 5487 stub2.GetCode(isolate)->set_is_pregenerated(true); | 5484 stub2.GetCode(isolate); |
| 5488 } | 5485 } |
| 5489 | 5486 |
| 5490 | 5487 |
| 5491 bool CodeStub::CanUseFPRegisters() { | 5488 bool CodeStub::CanUseFPRegisters() { |
| 5492 return true; // FPU is a base requirement for V8. | 5489 return true; // FPU is a base requirement for V8. |
| 5493 } | 5490 } |
| 5494 | 5491 |
| 5495 | 5492 |
| 5496 // Takes the input in 3 registers: address_ value_ and object_. A pointer to | 5493 // Takes the input in 3 registers: address_ value_ and object_. A pointer to |
| 5497 // the value has just been written into the object, now this stub makes sure | 5494 // the value has just been written into the object, now this stub makes sure |
| (...skipping 476 matching lines...) | (...skipping 476 matching lines...) |
| 5974 template<class T> | 5971 template<class T> |
| 5975 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 5972 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
| 5976 ElementsKind initial_kind = GetInitialFastElementsKind(); | 5973 ElementsKind initial_kind = GetInitialFastElementsKind(); |
| 5977 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); | 5974 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); |
| 5978 | 5975 |
| 5979 int to_index = GetSequenceIndexFromFastElementsKind( | 5976 int to_index = GetSequenceIndexFromFastElementsKind( |
| 5980 TERMINAL_FAST_ELEMENTS_KIND); | 5977 TERMINAL_FAST_ELEMENTS_KIND); |
| 5981 for (int i = 0; i <= to_index; ++i) { | 5978 for (int i = 0; i <= to_index; ++i) { |
| 5982 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 5979 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 5983 T stub(kind); | 5980 T stub(kind); |
| 5984 stub.GetCode(isolate)->set_is_pregenerated(true); | 5981 stub.GetCode(isolate); |
| 5985 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || | 5982 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || |
| 5986 (!FLAG_track_allocation_sites && | 5983 (!FLAG_track_allocation_sites && |
| 5987 (kind == initial_kind || kind == initial_holey_kind))) { | 5984 (kind == initial_kind || kind == initial_holey_kind))) { |
| 5988 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); | 5985 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
| 5989 stub1.GetCode(isolate)->set_is_pregenerated(true); | 5986 stub1.GetCode(isolate); |
| 5990 } | 5987 } |
| 5991 } | 5988 } |
| 5992 } | 5989 } |
| 5993 | 5990 |
| 5994 | 5991 |
| 5995 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 5992 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 5996 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 5993 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
| 5997 isolate); | 5994 isolate); |
| 5998 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 5995 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
| 5999 isolate); | 5996 isolate); |
| 6000 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( | 5997 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( |
| 6001 isolate); | 5998 isolate); |
| 6002 } | 5999 } |
| 6003 | 6000 |
| 6004 | 6001 |
| 6005 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( | 6002 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( |
| 6006 Isolate* isolate) { | 6003 Isolate* isolate) { |
| 6007 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; | 6004 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; |
| 6008 for (int i = 0; i < 2; i++) { | 6005 for (int i = 0; i < 2; i++) { |
| 6009 // For internal arrays we only need a few things. | 6006 // For internal arrays we only need a few things. |
| 6010 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 6007 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); |
| 6011 stubh1.GetCode(isolate)->set_is_pregenerated(true); | 6008 stubh1.GetCode(isolate); |
| 6012 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 6009 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
| 6013 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 6010 stubh2.GetCode(isolate); |
| 6014 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 6011 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
| 6015 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 6012 stubh3.GetCode(isolate); |
| 6016 } | 6013 } |
| 6017 } | 6014 } |
| 6018 | 6015 |
| 6019 | 6016 |
| 6020 void ArrayConstructorStub::GenerateDispatchToArrayStub( | 6017 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
| 6021 MacroAssembler* masm, | 6018 MacroAssembler* masm, |
| 6022 AllocationSiteOverrideMode mode) { | 6019 AllocationSiteOverrideMode mode) { |
| 6023 if (argument_count_ == ANY) { | 6020 if (argument_count_ == ANY) { |
| 6024 Label not_zero_case, not_one_case; | 6021 Label not_zero_case, not_one_case; |
| 6025 __ And(at, a0, a0); | 6022 __ And(at, a0, a0); |
| (...skipping 155 matching lines...) | (...skipping 155 matching lines...) |
| 6181 __ bind(&fast_elements_case); | 6178 __ bind(&fast_elements_case); |
| 6182 GenerateCase(masm, FAST_ELEMENTS); | 6179 GenerateCase(masm, FAST_ELEMENTS); |
| 6183 } | 6180 } |
| 6184 | 6181 |
| 6185 | 6182 |
| 6186 #undef __ | 6183 #undef __ |
| 6187 | 6184 |
| 6188 } } // namespace v8::internal | 6185 } } // namespace v8::internal |
| 6189 | 6186 |
| 6190 #endif // V8_TARGET_ARCH_MIPS | 6187 #endif // V8_TARGET_ARCH_MIPS |
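
The pattern the NEW column relies on throughout this change is that `GetCode(isolate)` both compiles a stub and records it in the isolate's stub cache, so ahead-of-time generation only needs to call it for its side effect and the explicit `set_is_pregenerated(true)` bookkeeping can be dropped (see the `FindCodeInCache`/`GetCode` pairing around old lines 1749-1756). The sketch below is a minimal, self-contained toy model of that find-in-cache-or-generate flow; `ToyIsolate`, `ToyStub`, and the simple map-based cache are illustrative assumptions, not V8's actual types or implementation.

```cpp
// Toy model of the FindCodeInCache / GetCode pattern seen in the diff above.
// All names here (ToyIsolate, ToyStub, Code) are illustrative only.
#include <cstdint>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Code {
  std::string body;  // Stands in for generated machine code.
};

class ToyIsolate {
 public:
  // Returns true and fills *code_out if a stub with this key was already generated.
  bool FindCodeInCache(uint32_t key, Code** code_out) {
    auto it = cache_.find(key);
    if (it == cache_.end()) return false;
    *code_out = it->second.get();
    return true;
  }
  // Records freshly generated code under the stub's key.
  Code* Insert(uint32_t key, std::unique_ptr<Code> code) {
    Code* raw = code.get();
    cache_[key] = std::move(code);
    return raw;
  }

 private:
  std::map<uint32_t, std::unique_ptr<Code>> cache_;
};

class ToyStub {
 public:
  explicit ToyStub(uint32_t key) : key_(key) {}
  // GetCode returns cached code if present, otherwise generates and caches it,
  // so calling it ahead of time is enough to "pregenerate" the stub.
  Code* GetCode(ToyIsolate* isolate) {
    Code* code = nullptr;
    if (isolate->FindCodeInCache(key_, &code)) return code;
    auto fresh = std::make_unique<Code>();
    fresh->body = "stub-" + std::to_string(key_);
    return isolate->Insert(key_, std::move(fresh));
  }

 private:
  uint32_t key_;
};

int main() {
  ToyIsolate isolate;
  ToyStub stub(42);
  Code* first = stub.GetCode(&isolate);   // Generates and caches.
  Code* second = stub.GetCode(&isolate);  // Hits the cache.
  std::cout << (first == second ? "cached" : "regenerated") << "\n";
}
```

In this model, repeated ahead-of-time calls such as `stub1.GetCode(isolate)` in the NEW column are idempotent: the first call populates the cache and later lookups return the same code object, which is why no separate "pregenerated" flag is needed.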