Chromium Code Reviews
| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 841 matching lines...) | |
| 852 } else { | 852 } else { |
| 853 __ Strd(r0, r1, FieldMemOperand(heap_number_result, | 853 __ Strd(r0, r1, FieldMemOperand(heap_number_result, |
| 854 HeapNumber::kValueOffset)); | 854 HeapNumber::kValueOffset)); |
| 855 } | 855 } |
| 856 // Place heap_number_result in r0 and return to the pushed return address. | 856 // Place heap_number_result in r0 and return to the pushed return address. |
| 857 __ mov(r0, Operand(heap_number_result)); | 857 __ mov(r0, Operand(heap_number_result)); |
| 858 __ pop(pc); | 858 __ pop(pc); |
| 859 } | 859 } |
| 860 | 860 |
| 861 | 861 |
| 862 bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() { | 862 bool WriteInt32ToHeapNumberStub::IsPregenerated() { |
| 863 // These variants are compiled ahead of time. See next method. | 863 // These variants are compiled ahead of time. See next method. |
| 864 if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) { | 864 if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) { |
| 865 return true; | 865 return true; |
| 866 } | 866 } |
| 867 if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) { | 867 if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) { |
| 868 return true; | 868 return true; |
| 869 } | 869 } |
| 870 // Other register combinations are generated as and when they are needed, | 870 // Other register combinations are generated as and when they are needed, |
| 871 // so it is unsafe to call them from stubs (we can't generate a stub while | 871 // so it is unsafe to call them from stubs (we can't generate a stub while |
| 872 // we are generating a stub). | 872 // we are generating a stub). |
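Note on the hunk above: the patch renames CompilingCallsToThisStubIsGCSafe() to IsPregenerated(), and the predicate reports true only for the two register assignments that GenerateFixedRegStubsAheadOfTime() compiles up front. Any other combination is generated lazily, and lazy generation cannot happen while the assembler is already inside a stub, hence the "we can't generate a stub while we are generating a stub" comment. A minimal standalone sketch of the predicate's shape, using hypothetical simplified Register/stub types rather than the real V8 ones:

```cpp
#include <cassert>

// Hypothetical, simplified stand-ins for the real V8 types; this is a
// sketch of the predicate's shape, not the production implementation.
struct Register {
  int code;
  bool is(Register other) const { return code == other.code; }
};
const Register r0{0}, r1{1}, r2{2}, r3{3};

class WriteInt32ToHeapNumberStubSketch {
 public:
  WriteInt32ToHeapNumberStubSketch(Register the_int, Register heap_number,
                                   Register scratch)
      : the_int_(the_int), the_heap_number_(heap_number), scratch_(scratch) {}

  // Only the two register assignments compiled ahead of time are safe
  // to call from other stubs; everything else would be generated
  // lazily, which cannot happen during stub generation.
  bool IsPregenerated() const {
    if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
      return true;
    }
    if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {
      return true;
    }
    return false;
  }

 private:
  Register the_int_;
  Register the_heap_number_;
  Register scratch_;
};

int main() {
  assert(WriteInt32ToHeapNumberStubSketch(r1, r0, r2).IsPregenerated());
  assert(!WriteInt32ToHeapNumberStubSketch(r3, r0, r1).IsPregenerated());
}
```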
| (...skipping 1214 matching lines...) | |
| 2087 } | 2087 } |
| 2088 | 2088 |
| 2089 | 2089 |
| 2090 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs( | 2090 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs( |
| 2091 MacroAssembler* masm) { | 2091 MacroAssembler* masm) { |
| 2092 UNIMPLEMENTED(); | 2092 UNIMPLEMENTED(); |
| 2093 } | 2093 } |
| 2094 | 2094 |
| 2095 | 2095 |
| 2096 void BinaryOpStub::Generate(MacroAssembler* masm) { | 2096 void BinaryOpStub::Generate(MacroAssembler* masm) { |
| | 2097 // Explicitly allow generation of nested stubs. It is safe here. |

Erik Corry 2011/09/26 22:47:26:
Can the comment explain in more detail why this is safe?

| | 2098 AllowStubCallsScope allow_stub_calls(masm, true); |
| | 2099 |
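Regarding the reviewer question above: the scope object is what makes the nested generation explicit rather than accidental. A toy RAII sketch of the AllowStubCallsScope idea follows; MacroAssemblerSketch and AllowStubCallsScopeSketch are hypothetical names, not the real V8 classes, which live in the macro assembler. The scope flips the assembler's allow-stub-calls flag for the dynamic extent of BinaryOpStub::Generate and restores the previous value on exit, so stub-from-stub calls are permitted only where the author has asserted it is safe, presumably because every stub reachable from here is itself pregenerated.

```cpp
// Toy RAII sketch of the AllowStubCallsScope idea; these are
// hypothetical simplified classes, not the real V8 ones.
class MacroAssemblerSketch {
 public:
  bool allow_stub_calls() const { return allow_stub_calls_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }

 private:
  bool allow_stub_calls_ = false;
};

class AllowStubCallsScopeSketch {
 public:
  AllowStubCallsScopeSketch(MacroAssemblerSketch* masm, bool allow)
      : masm_(masm), previous_(masm->allow_stub_calls()) {
    // Permit (or forbid) stub-from-stub calls for this scope only.
    masm_->set_allow_stub_calls(allow);
  }
  // Restore the previous setting when Generate() returns.
  ~AllowStubCallsScopeSketch() { masm_->set_allow_stub_calls(previous_); }

 private:
  MacroAssemblerSketch* masm_;
  bool previous_;
};
```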
| 2097 switch (operands_type_) { | 2100 switch (operands_type_) { |
| 2098 case BinaryOpIC::UNINITIALIZED: | 2101 case BinaryOpIC::UNINITIALIZED: |
| 2099 GenerateTypeTransition(masm); | 2102 GenerateTypeTransition(masm); |
| 2100 break; | 2103 break; |
| 2101 case BinaryOpIC::SMI: | 2104 case BinaryOpIC::SMI: |
| 2102 GenerateSmiStub(masm); | 2105 GenerateSmiStub(masm); |
| 2103 break; | 2106 break; |
| 2104 case BinaryOpIC::INT32: | 2107 case BinaryOpIC::INT32: |
| 2105 GenerateInt32Stub(masm); | 2108 GenerateInt32Stub(masm); |
| 2106 break; | 2109 break; |
| (...skipping 1280 matching lines...) | |
| 3387 __ bind(&call_runtime); | 3390 __ bind(&call_runtime); |
| 3388 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 3391 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
| 3389 } | 3392 } |
| 3390 | 3393 |
| 3391 | 3394 |
| 3392 bool CEntryStub::NeedsImmovableCode() { | 3395 bool CEntryStub::NeedsImmovableCode() { |
| 3393 return true; | 3396 return true; |
| 3394 } | 3397 } |
| 3395 | 3398 |
| 3396 | 3399 |
| 3397 bool CEntryStub::CompilingCallsToThisStubIsGCSafe() { | 3400 bool CEntryStub::IsPregenerated() { |
| 3398 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && | 3401 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
| 3399 result_size_ == 1; | 3402 result_size_ == 1; |
| 3400 } | 3403 } |
| 3401 | 3404 |
| 3402 | 3405 |
| 3403 void CodeStub::GenerateStubsAheadOfTime() { | 3406 void CodeStub::GenerateStubsAheadOfTime() { |
| 3404 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(); | 3407 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(); |
| | 3408 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(); |
| | 3409 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(); |
| 3405 } | 3410 } |
| 3406 | 3411 |
| 3407 | 3412 |
| 3408 void CodeStub::GenerateFPStubs() { | 3413 void CodeStub::GenerateFPStubs() { |
| 3409 CEntryStub save_doubles(1, kSaveFPRegs); | 3414 CEntryStub save_doubles(1, kSaveFPRegs); |
| 3410 Handle<Code> code = save_doubles.GetCode(); | 3415 Handle<Code> code = save_doubles.GetCode(); |
| 3411 code->GetIsolate()->set_fp_stubs_generated(true); | 3416 code->GetIsolate()->set_fp_stubs_generated(true); |
| 3412 } | 3417 } |
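The two hunks above cooperate: GenerateFPStubs() compiles the save-doubles CEntryStub once and records that fact on the isolate, and CEntryStub::IsPregenerated() consults the same flag. A hedged sketch of that handshake, with hypothetical *Sketch types standing in for the real Isolate and CEntryStub:

```cpp
// Hedged sketch of the handshake between GenerateFPStubs and
// CEntryStub::IsPregenerated; these are simplified stand-in types.
struct IsolateSketch {
  bool fp_stubs_generated = false;
};

struct CEntryStubSketch {
  bool save_doubles;
  int result_size;

  // Mirrors the predicate above: the save-doubles variant only counts
  // as pregenerated once GenerateFPStubs() has run, and only the
  // single-word-result flavor is covered at all.
  bool IsPregenerated(const IsolateSketch& isolate) const {
    return (!save_doubles || isolate.fp_stubs_generated) &&
           result_size == 1;
  }
};

void GenerateFPStubsSketch(IsolateSketch* isolate) {
  // The real code compiles CEntryStub(1, kSaveFPRegs) here; the actual
  // compilation is elided in this sketch.
  isolate->fp_stubs_generated = true;
}
```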
| 3413 | 3418 |
| 3414 | 3419 |
| (...skipping 3415 matching lines...) | |
| 6830 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. | 6835 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. |
| 6831 { r2, r1, r3, EMIT_REMEMBERED_SET }, | 6836 { r2, r1, r3, EMIT_REMEMBERED_SET }, |
| 6832 { r3, r1, r2, EMIT_REMEMBERED_SET }, | 6837 { r3, r1, r2, EMIT_REMEMBERED_SET }, |
| 6833 // KeyedStoreStubCompiler::GenerateStoreFastElement. | 6838 // KeyedStoreStubCompiler::GenerateStoreFastElement. |
| 6834 { r4, r2, r3, EMIT_REMEMBERED_SET }, | 6839 { r4, r2, r3, EMIT_REMEMBERED_SET }, |
| 6835 // Null termination. | 6840 // Null termination. |
| 6836 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} | 6841 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} |
| 6837 }; | 6842 }; |
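The kAheadOfTime table above is sentinel-terminated: the { no_reg, ... } row ends the scan, so RecordWriteStub::IsPregenerated() (just below) can walk it without carrying a separate length. A minimal sketch of the idiom, with simplified stand-in types rather than the real V8 Register and stub-list declarations:

```cpp
// Minimal sketch of the sentinel-terminated table idiom; simplified
// hypothetical types, not the real V8 declarations.
struct Register {
  int code;
  bool is(Register other) const { return code == other.code; }
};
const Register no_reg{-1}, r1{1}, r2{2}, r3{3}, r4{4};

enum RememberedSetAction { EMIT_REMEMBERED_SET };

struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;
  RememberedSetAction action;
};

const AheadOfTimeWriteBarrierStubList kAheadOfTimeSketch[] = {
  { r2, r1, r3, EMIT_REMEMBERED_SET },
  { r3, r1, r2, EMIT_REMEMBERED_SET },
  { r4, r2, r3, EMIT_REMEMBERED_SET },
  { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET },  // sentinel row
};

// Walk until the null-register terminator, the same way
// RecordWriteStub::IsPregenerated() scans for a matching entry.
int CountEntries() {
  int count = 0;
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTimeSketch;
       !entry->object.is(no_reg);
       entry++) {
    count++;
  }
  return count;  // 3 for the table above
}
```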
| 6838 | 6843 |
| 6839 | 6844 |
| 6840 bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() { | 6845 bool RecordWriteStub::IsPregenerated() { |
| 6841 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | 6846 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
| 6842 !entry->object.is(no_reg); | 6847 !entry->object.is(no_reg); |
| 6843 entry++) { | 6848 entry++) { |
| 6844 if (object_.is(entry->object) && | 6849 if (object_.is(entry->object) && |
| 6845 value_.is(entry->value) && | 6850 value_.is(entry->value) && |
| 6846 address_.is(entry->address) && | 6851 address_.is(entry->address) && |
| 6847 remembered_set_action_ == entry->action && | 6852 remembered_set_action_ == entry->action && |
| 6848 save_fp_regs_mode_ == kDontSaveFPRegs) { | 6853 save_fp_regs_mode_ == kDontSaveFPRegs) { |
| 6849 return true; | 6854 return true; |
| 6850 } | 6855 } |
| (...skipping 213 matching lines...) | |
| 7064 | 7069 |
| 7065 // Fall through when we need to inform the incremental marker. | 7070 // Fall through when we need to inform the incremental marker. |
| 7066 } | 7071 } |
| 7067 | 7072 |
| 7068 | 7073 |
| 7069 #undef __ | 7074 #undef __ |
| 7070 | 7075 |
| 7071 } } // namespace v8::internal | 7076 } } // namespace v8::internal |
| 7072 | 7077 |
| 7073 #endif // V8_TARGET_ARCH_ARM | 7078 #endif // V8_TARGET_ARCH_ARM |