| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
| 8 | 8 |
| 9 #include "bootstrapper.h" | 9 #include "bootstrapper.h" |
| 10 #include "code-stubs.h" | 10 #include "code-stubs.h" |
| (...skipping 2475 matching lines...) |
| 2486 } | 2486 } |
| 2487 | 2487 |
| 2488 | 2488 |
| 2489 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2489 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 2490 CEntryStub::GenerateAheadOfTime(isolate); | 2490 CEntryStub::GenerateAheadOfTime(isolate); |
| 2491 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2491 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 2492 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2492 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 2493 // It is important that the store buffer overflow stubs are generated first. | 2493 // It is important that the store buffer overflow stubs are generated first. |
| 2494 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2494 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 2495 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2495 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2496 if (Serializer::enabled()) { | 2496 if (Serializer::enabled(isolate)) { |
| 2497 PlatformFeatureScope sse2(isolate, SSE2); | 2497 PlatformFeatureScope sse2(isolate, SSE2); |
| 2498 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2498 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 2499 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2499 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2500 } else { | 2500 } else { |
| 2501 BinaryOpICStub::GenerateAheadOfTime(isolate); | 2501 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 2502 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 2502 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2503 } | 2503 } |
| 2504 } | 2504 } |
| 2505 | 2505 |
| 2506 | 2506 |
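The recurring edit in this file is the same at every call site: Serializer::enabled() and CpuFeatures::IsSafeForSnapshot() now take an Isolate* (or fetch it via masm->isolate()) instead of being argument-free, presumably so the answer is tied to a specific isolate rather than process-wide state. A minimal, illustrative-only C++ sketch of the call-site shape; the types and names below are hypothetical stand-ins, not V8's real declarations:

    // Hypothetical stand-ins, not V8's real declarations.
    struct Isolate {
      bool serializer_enabled = false;  // placeholder for per-isolate state
    };

    struct Serializer {
      // Mirrors the new call-site shape: the predicate is answered
      // for a particular isolate passed in explicitly.
      static bool enabled(Isolate* isolate) { return isolate->serializer_enabled; }
    };

    void GenerateStubsForIsolate(Isolate* isolate) {
      if (Serializer::enabled(isolate)) {
        // Snapshot build: the diff opens PlatformFeatureScope sse2(isolate, SSE2)
        // around the BinaryOpIC stub generation in this branch.
      } else {
        // Normal startup: the same stubs are generated without that scope.
      }
    }

As the diff shows, both branches generate the same BinaryOpIC stubs; the serializer branch differs only in the PlatformFeatureScope it opens around them.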
| (...skipping 621 matching lines...) |
| 3128 | 3128 |
| 3129 __ bind(&done); | 3129 __ bind(&done); |
| 3130 } | 3130 } |
| 3131 | 3131 |
| 3132 | 3132 |
| 3133 void StringHelper::GenerateHashInit(MacroAssembler* masm, | 3133 void StringHelper::GenerateHashInit(MacroAssembler* masm, |
| 3134 Register hash, | 3134 Register hash, |
| 3135 Register character, | 3135 Register character, |
| 3136 Register scratch) { | 3136 Register scratch) { |
| 3137 // hash = (seed + character) + ((seed + character) << 10); | 3137 // hash = (seed + character) + ((seed + character) << 10); |
| 3138 if (Serializer::enabled()) { | 3138 if (Serializer::enabled(masm->isolate())) { |
| 3139 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); | 3139 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); |
| 3140 __ SmiUntag(scratch); | 3140 __ SmiUntag(scratch); |
| 3141 __ add(scratch, character); | 3141 __ add(scratch, character); |
| 3142 __ mov(hash, scratch); | 3142 __ mov(hash, scratch); |
| 3143 __ shl(scratch, 10); | 3143 __ shl(scratch, 10); |
| 3144 __ add(hash, scratch); | 3144 __ add(hash, scratch); |
| 3145 } else { | 3145 } else { |
| 3146 int32_t seed = masm->isolate()->heap()->HashSeed(); | 3146 int32_t seed = masm->isolate()->heap()->HashSeed(); |
| 3147 __ lea(scratch, Operand(character, seed)); | 3147 __ lea(scratch, Operand(character, seed)); |
| 3148 __ shl(scratch, 10); | 3148 __ shl(scratch, 10); |
| (...skipping 1055 matching lines...) |
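For reference, GenerateHashInit above emits the first step of the string hash, stated in its own comment as hash = (seed + character) + ((seed + character) << 10), which is equivalent to (seed + character) * 1025 in 32-bit arithmetic. A minimal C++ sketch of that arithmetic (illustrative only; HashInit and its signature are not V8 names):

    #include <cstdint>

    // Mirrors the emitted ia32 sequence: seed + character is formed first
    // (add, or lea with the seed folded into the displacement), then the
    // value is added to itself shifted left by 10.
    uint32_t HashInit(uint32_t seed, uint32_t character) {
      uint32_t scratch = seed + character;
      return scratch + (scratch << 10);  // == scratch * 1025 (mod 2^32)
    }

The two assembly paths differ only in where the seed comes from: with the serializer enabled it is loaded from the heap's root list at run time (LoadRoot + SmiUntag), while otherwise it is embedded as an immediate and folded into the lea displacement (Operand(character, seed)).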
| 4204 __ mov(result_, Immediate(0)); | 4204 __ mov(result_, Immediate(0)); |
| 4205 __ Drop(1); | 4205 __ Drop(1); |
| 4206 __ ret(2 * kPointerSize); | 4206 __ ret(2 * kPointerSize); |
| 4207 } | 4207 } |
| 4208 | 4208 |
| 4209 | 4209 |
| 4210 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 4210 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 4211 Isolate* isolate) { | 4211 Isolate* isolate) { |
| 4212 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); | 4212 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); |
| 4213 stub.GetCode(); | 4213 stub.GetCode(); |
| 4214 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { | 4214 if (CpuFeatures::IsSafeForSnapshot(isolate, SSE2)) { |
| 4215 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); | 4215 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); |
| 4216 stub2.GetCode(); | 4216 stub2.GetCode(); |
| 4217 } | 4217 } |
| 4218 } | 4218 } |
| 4219 | 4219 |
| 4220 | 4220 |
| 4221 bool CodeStub::CanUseFPRegisters() { | 4221 bool CodeStub::CanUseFPRegisters() { |
| 4222 return CpuFeatures::IsSupported(SSE2); | 4222 return CpuFeatures::IsSupported(SSE2); |
| 4223 } | 4223 } |
| 4224 | 4224 |
| (...skipping 798 matching lines...) |
| 5023 Operand(ebp, 7 * kPointerSize), | 5023 Operand(ebp, 7 * kPointerSize), |
| 5024 NULL); | 5024 NULL); |
| 5025 } | 5025 } |
| 5026 | 5026 |
| 5027 | 5027 |
| 5028 #undef __ | 5028 #undef __ |
| 5029 | 5029 |
| 5030 } } // namespace v8::internal | 5030 } } // namespace v8::internal |
| 5031 | 5031 |
| 5032 #endif // V8_TARGET_ARCH_IA32 | 5032 #endif // V8_TARGET_ARCH_IA32 |