OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" | 7 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 39 matching lines...)
50 bool LCodeGen::GenerateCode() { | 50 bool LCodeGen::GenerateCode() { |
51 LPhase phase("Z_Code generation", chunk()); | 51 LPhase phase("Z_Code generation", chunk()); |
52 DCHECK(is_unused()); | 52 DCHECK(is_unused()); |
53 status_ = GENERATING; | 53 status_ = GENERATING; |
54 | 54 |
55 // Open a frame scope to indicate that there is a frame on the stack. The | 55 // Open a frame scope to indicate that there is a frame on the stack. The |
56 // MANUAL indicates that the scope shouldn't actually generate code to set up | 56 // MANUAL indicates that the scope shouldn't actually generate code to set up |
57 // the frame (that is done in GeneratePrologue). | 57 // the frame (that is done in GeneratePrologue). |
58 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 58 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
59 | 59 |
60 support_aligned_spilled_doubles_ = info()->IsOptimizing(); | |
61 | |
62 dynamic_frame_alignment_ = info()->IsOptimizing() && | |
63 ((chunk()->num_double_slots() > 2 && | |
64 !chunk()->graph()->is_recursive()) || | |
65 !info()->osr_ast_id().IsNone()); | |
66 | |
67 return GeneratePrologue() && | 60 return GeneratePrologue() && |
68 GenerateBody() && | 61 GenerateBody() && |
69 GenerateDeferredCode() && | 62 GenerateDeferredCode() && |
70 GenerateJumpTable() && | 63 GenerateJumpTable() && |
71 GenerateSafepointTable(); | 64 GenerateSafepointTable(); |
72 } | 65 } |
73 | 66 |
74 | 67 |
75 void LCodeGen::FinishCode(Handle<Code> code) { | 68 void LCodeGen::FinishCode(Handle<Code> code) { |
76 DCHECK(is_done()); | 69 DCHECK(is_done()); |
(...skipping 46 matching lines...)
123 count++; | 116 count++; |
124 } | 117 } |
125 } | 118 } |
126 | 119 |
127 | 120 |
128 bool LCodeGen::GeneratePrologue() { | 121 bool LCodeGen::GeneratePrologue() { |
129 DCHECK(is_generating()); | 122 DCHECK(is_generating()); |
130 | 123 |
131 if (info()->IsOptimizing()) { | 124 if (info()->IsOptimizing()) { |
132 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 125 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
133 | |
134 if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) { | |
135 // Move state of dynamic frame alignment into edx. | |
136 __ Move(edx, Immediate(kNoAlignmentPadding)); | |
137 | |
138 Label do_not_pad, align_loop; | |
139 STATIC_ASSERT(kDoubleSize == 2 * kPointerSize); | |
140 // Align esp + 4 to a multiple of 2 * kPointerSize. | |
141 __ test(esp, Immediate(kPointerSize)); | |
142 __ j(not_zero, &do_not_pad, Label::kNear); | |
143 __ push(Immediate(0)); | |
144 __ mov(ebx, esp); | |
145 __ mov(edx, Immediate(kAlignmentPaddingPushed)); | |
146 // Copy arguments, receiver, and return address. | |
147 __ mov(ecx, Immediate(scope()->num_parameters() + 2)); | |
148 | |
149 __ bind(&align_loop); | |
150 __ mov(eax, Operand(ebx, 1 * kPointerSize)); | |
151 __ mov(Operand(ebx, 0), eax); | |
152 __ add(Operand(ebx), Immediate(kPointerSize)); | |
153 __ dec(ecx); | |
154 __ j(not_zero, &align_loop, Label::kNear); | |
155 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); | |
156 __ bind(&do_not_pad); | |
157 } | |
158 } | 126 } |
159 | 127 |
160 info()->set_prologue_offset(masm_->pc_offset()); | 128 info()->set_prologue_offset(masm_->pc_offset()); |
161 if (NeedsEagerFrame()) { | 129 if (NeedsEagerFrame()) { |
162 DCHECK(!frame_is_built_); | 130 DCHECK(!frame_is_built_); |
163 frame_is_built_ = true; | 131 frame_is_built_ = true; |
164 if (info()->IsStub()) { | 132 if (info()->IsStub()) { |
165 __ StubPrologue(); | 133 __ StubPrologue(); |
166 } else { | 134 } else { |
167 __ Prologue(info()->GeneratePreagedPrologue()); | 135 __ Prologue(info()->GeneratePreagedPrologue()); |
168 } | 136 } |
169 } | 137 } |
170 | 138 |
171 if (info()->IsOptimizing() && | |
172 dynamic_frame_alignment_ && | |
173 FLAG_debug_code) { | |
174 __ test(esp, Immediate(kPointerSize)); | |
175 __ Assert(zero, kFrameIsExpectedToBeAligned); | |
176 } | |
177 | |
178 // Reserve space for the stack slots needed by the code. | 139 // Reserve space for the stack slots needed by the code. |
179 int slots = GetStackSlotCount(); | 140 int slots = GetStackSlotCount(); |
180 DCHECK(slots != 0 || !info()->IsOptimizing()); | 141 DCHECK(slots != 0 || !info()->IsOptimizing()); |
181 if (slots > 0) { | 142 if (slots > 0) { |
182 if (slots == 1) { | 143 __ sub(Operand(esp), Immediate(slots * kPointerSize)); |
183 if (dynamic_frame_alignment_) { | |
184 __ push(edx); | |
185 } else { | |
186 __ push(Immediate(kNoAlignmentPadding)); | |
187 } | |
188 } else { | |
189 if (FLAG_debug_code) { | |
190 __ sub(Operand(esp), Immediate(slots * kPointerSize)); | |
191 #ifdef _MSC_VER | 144 #ifdef _MSC_VER |
192 MakeSureStackPagesMapped(slots * kPointerSize); | 145 MakeSureStackPagesMapped(slots * kPointerSize); |
193 #endif | 146 #endif |
194 __ push(eax); | 147 if (FLAG_debug_code) { |
195 __ mov(Operand(eax), Immediate(slots)); | 148 __ push(eax); |
196 Label loop; | 149 __ mov(Operand(eax), Immediate(slots)); |
197 __ bind(&loop); | 150 Label loop; |
198 __ mov(MemOperand(esp, eax, times_4, 0), | 151 __ bind(&loop); |
199 Immediate(kSlotsZapValue)); | 152 __ mov(MemOperand(esp, eax, times_4, 0), Immediate(kSlotsZapValue)); |
200 __ dec(eax); | 153 __ dec(eax); |
201 __ j(not_zero, &loop); | 154 __ j(not_zero, &loop); |
202 __ pop(eax); | 155 __ pop(eax); |
203 } else { | |
204 __ sub(Operand(esp), Immediate(slots * kPointerSize)); | |
205 #ifdef _MSC_VER | |
206 MakeSureStackPagesMapped(slots * kPointerSize); | |
207 #endif | |
208 } | |
209 | |
210 if (support_aligned_spilled_doubles_) { | |
211 Comment(";;; Store dynamic frame alignment tag for spilled doubles"); | |
212 // Store dynamic frame alignment state in the first local. | |
213 int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset; | |
214 if (dynamic_frame_alignment_) { | |
215 __ mov(Operand(ebp, offset), edx); | |
216 } else { | |
217 __ mov(Operand(ebp, offset), Immediate(kNoAlignmentPadding)); | |
218 } | |
219 } | |
220 } | 156 } |
221 | 157 |
222 if (info()->saves_caller_doubles()) SaveCallerDoubles(); | 158 if (info()->saves_caller_doubles()) SaveCallerDoubles(); |
223 } | 159 } |
224 return !is_aborted(); | 160 return !is_aborted(); |
225 } | 161 } |
226 | 162 |
227 | 163 |
228 void LCodeGen::DoPrologue(LPrologue* instr) { | 164 void LCodeGen::DoPrologue(LPrologue* instr) { |
229 Comment(";;; Prologue begin"); | 165 Comment(";;; Prologue begin"); |
(...skipping 61 matching lines...)
291 } | 227 } |
292 | 228 |
293 | 229 |
294 void LCodeGen::GenerateOsrPrologue() { | 230 void LCodeGen::GenerateOsrPrologue() { |
295 // Generate the OSR entry prologue at the first unknown OSR value, or if there | 231 // Generate the OSR entry prologue at the first unknown OSR value, or if there |
296 // are none, at the OSR entrypoint instruction. | 232 // are none, at the OSR entrypoint instruction. |
297 if (osr_pc_offset_ >= 0) return; | 233 if (osr_pc_offset_ >= 0) return; |
298 | 234 |
299 osr_pc_offset_ = masm()->pc_offset(); | 235 osr_pc_offset_ = masm()->pc_offset(); |
300 | 236 |
301 // Move state of dynamic frame alignment into edx. | |
302 __ Move(edx, Immediate(kNoAlignmentPadding)); | |
303 | |
304 if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) { | |
305 Label do_not_pad, align_loop; | |
306 // Align ebp + 4 to a multiple of 2 * kPointerSize. | |
307 __ test(ebp, Immediate(kPointerSize)); | |
308 __ j(zero, &do_not_pad, Label::kNear); | |
309 __ push(Immediate(0)); | |
310 __ mov(ebx, esp); | |
311 __ mov(edx, Immediate(kAlignmentPaddingPushed)); | |
312 | |
313 // Move all parts of the frame over one word. The frame consists of: | |
314 // unoptimized frame slots, alignment state, context, frame pointer, return | |
315 // address, receiver, and the arguments. | |
316 __ mov(ecx, Immediate(scope()->num_parameters() + | |
317 5 + graph()->osr()->UnoptimizedFrameSlots())); | |
318 | |
319 __ bind(&align_loop); | |
320 __ mov(eax, Operand(ebx, 1 * kPointerSize)); | |
321 __ mov(Operand(ebx, 0), eax); | |
322 __ add(Operand(ebx), Immediate(kPointerSize)); | |
323 __ dec(ecx); | |
324 __ j(not_zero, &align_loop, Label::kNear); | |
325 __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue)); | |
326 __ sub(Operand(ebp), Immediate(kPointerSize)); | |
327 __ bind(&do_not_pad); | |
328 } | |
329 | |
330 // Save the first local, which is overwritten by the alignment state. | |
331 Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize); | |
332 __ push(alignment_loc); | |
333 | |
334 // Set the dynamic frame alignment state. | |
335 __ mov(alignment_loc, edx); | |
336 | |
337 // Adjust the frame size, subsuming the unoptimized frame into the | 237 // Adjust the frame size, subsuming the unoptimized frame into the |
338 // optimized frame. | 238 // optimized frame. |
339 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 239 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
340 DCHECK(slots >= 1); | 240 DCHECK(slots >= 0); |
341 __ sub(esp, Immediate((slots - 1) * kPointerSize)); | 241 __ sub(esp, Immediate(slots * kPointerSize)); |
342 } | 242 } |
343 | 243 |
344 | 244 |
345 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 245 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
346 if (instr->IsCall()) { | 246 if (instr->IsCall()) { |
347 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 247 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
348 } | 248 } |
349 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 249 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
350 safepoints_.BumpLastLazySafepointIndex(); | 250 safepoints_.BumpLastLazySafepointIndex(); |
351 } | 251 } |
(...skipping 2103 matching lines...)
2455 Label true_value, done; | 2355 Label true_value, done; |
2456 __ test(eax, Operand(eax)); | 2356 __ test(eax, Operand(eax)); |
2457 __ j(condition, &true_value, Label::kNear); | 2357 __ j(condition, &true_value, Label::kNear); |
2458 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2358 __ mov(ToRegister(instr->result()), factory()->false_value()); |
2459 __ jmp(&done, Label::kNear); | 2359 __ jmp(&done, Label::kNear); |
2460 __ bind(&true_value); | 2360 __ bind(&true_value); |
2461 __ mov(ToRegister(instr->result()), factory()->true_value()); | 2361 __ mov(ToRegister(instr->result()), factory()->true_value()); |
2462 __ bind(&done); | 2362 __ bind(&done); |
2463 } | 2363 } |
2464 | 2364 |
2465 | 2365 void LCodeGen::EmitReturn(LReturn* instr) { |
2466 void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { | 2366 int extra_value_count = 1; |
2467 int extra_value_count = dynamic_frame_alignment ? 2 : 1; | |
2468 | 2367 |
2469 if (instr->has_constant_parameter_count()) { | 2368 if (instr->has_constant_parameter_count()) { |
2470 int parameter_count = ToInteger32(instr->constant_parameter_count()); | 2369 int parameter_count = ToInteger32(instr->constant_parameter_count()); |
2471 if (dynamic_frame_alignment && FLAG_debug_code) { | |
2472 __ cmp(Operand(esp, | |
2473 (parameter_count + extra_value_count) * kPointerSize), | |
2474 Immediate(kAlignmentZapValue)); | |
2475 __ Assert(equal, kExpectedAlignmentMarker); | |
2476 } | |
2477 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); | 2370 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); |
2478 } else { | 2371 } else { |
2479 DCHECK(info()->IsStub()); // Functions would need to drop one more value. | 2372 DCHECK(info()->IsStub()); // Functions would need to drop one more value. |
2480 Register reg = ToRegister(instr->parameter_count()); | 2373 Register reg = ToRegister(instr->parameter_count()); |
2481 // The argument count parameter is a smi | 2374 // The argument count parameter is a smi |
2482 __ SmiUntag(reg); | 2375 __ SmiUntag(reg); |
2483 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; | 2376 Register return_addr_reg = reg.is(ecx) ? ebx : ecx; |
2484 if (dynamic_frame_alignment && FLAG_debug_code) { | |
2485 DCHECK(extra_value_count == 2); | |
2486 __ cmp(Operand(esp, reg, times_pointer_size, | |
2487 extra_value_count * kPointerSize), | |
2488 Immediate(kAlignmentZapValue)); | |
2489 __ Assert(equal, kExpectedAlignmentMarker); | |
2490 } | |
2491 | 2377 |
2492 // emit code to restore stack based on instr->parameter_count() | 2378 // emit code to restore stack based on instr->parameter_count() |
2493 __ pop(return_addr_reg); // save return address | 2379 __ pop(return_addr_reg); // save return address |
2494 if (dynamic_frame_alignment) { | |
2495 __ inc(reg); // 1 more for alignment | |
2496 } | |
2497 | |
2498 __ shl(reg, kPointerSizeLog2); | 2380 __ shl(reg, kPointerSizeLog2); |
2499 __ add(esp, reg); | 2381 __ add(esp, reg); |
2500 __ jmp(return_addr_reg); | 2382 __ jmp(return_addr_reg); |
2501 } | 2383 } |
2502 } | 2384 } |
2503 | 2385 |
2504 | 2386 |
2505 void LCodeGen::DoReturn(LReturn* instr) { | 2387 void LCodeGen::DoReturn(LReturn* instr) { |
2506 if (FLAG_trace && info()->IsOptimizing()) { | 2388 if (FLAG_trace && info()->IsOptimizing()) { |
2507 // Preserve the return value on the stack and rely on the runtime call | 2389 // Preserve the return value on the stack and rely on the runtime call |
2508 // to return the value in the same register. We're leaving the code | 2390 // to return the value in the same register. We're leaving the code |
2509 // managed by the register allocator and tearing down the frame, it's | 2391 // managed by the register allocator and tearing down the frame, it's |
2510 // safe to write to the context register. | 2392 // safe to write to the context register. |
2511 __ push(eax); | 2393 __ push(eax); |
2512 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 2394 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
2513 __ CallRuntime(Runtime::kTraceExit); | 2395 __ CallRuntime(Runtime::kTraceExit); |
2514 } | 2396 } |
2515 if (info()->saves_caller_doubles()) RestoreCallerDoubles(); | 2397 if (info()->saves_caller_doubles()) RestoreCallerDoubles(); |
2516 if (dynamic_frame_alignment_) { | |
2517 // Fetch the state of the dynamic frame alignment. | |
2518 __ mov(edx, Operand(ebp, | |
2519 JavaScriptFrameConstants::kDynamicAlignmentStateOffset)); | |
2520 } | |
2521 if (NeedsEagerFrame()) { | 2398 if (NeedsEagerFrame()) { |
2522 __ mov(esp, ebp); | 2399 __ mov(esp, ebp); |
2523 __ pop(ebp); | 2400 __ pop(ebp); |
2524 } | 2401 } |
2525 if (dynamic_frame_alignment_) { | |
2526 Label no_padding; | |
2527 __ cmp(edx, Immediate(kNoAlignmentPadding)); | |
2528 __ j(equal, &no_padding, Label::kNear); | |
2529 | 2402 |
2530 EmitReturn(instr, true); | 2403 EmitReturn(instr); |
2531 __ bind(&no_padding); | |
2532 } | |
2533 | |
2534 EmitReturn(instr, false); | |
2535 } | 2404 } |
2536 | 2405 |
2537 | 2406 |
2538 template <class T> | 2407 template <class T> |
2539 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 2408 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { |
2540 Register vector_register = ToRegister(instr->temp_vector()); | 2409 Register vector_register = ToRegister(instr->temp_vector()); |
2541 Register slot_register = LoadWithVectorDescriptor::SlotRegister(); | 2410 Register slot_register = LoadWithVectorDescriptor::SlotRegister(); |
2542 DCHECK(vector_register.is(LoadWithVectorDescriptor::VectorRegister())); | 2411 DCHECK(vector_register.is(LoadWithVectorDescriptor::VectorRegister())); |
2543 DCHECK(slot_register.is(eax)); | 2412 DCHECK(slot_register.is(eax)); |
2544 | 2413 |
(...skipping 2811 matching lines...)
5356 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context); | 5225 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context); |
5357 } | 5226 } |
5358 | 5227 |
5359 | 5228 |
5360 #undef __ | 5229 #undef __ |
5361 | 5230 |
5362 } // namespace internal | 5231 } // namespace internal |
5363 } // namespace v8 | 5232 } // namespace v8 |
5364 | 5233 |
5365 #endif // V8_TARGET_ARCH_IA32 | 5234 #endif // V8_TARGET_ARCH_IA32 |
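
The net effect of the diff above is that the ia32 Crankshaft code generator no longer tracks dynamic frame alignment: the prologue now just reserves GetStackSlotCount() pointer-sized slots (zapping them under --debug-code), the OSR prologue only subtracts GetStackSlotCount() - UnoptimizedFrameSlots() words, and EmitReturn always drops parameter_count plus one extra value with no alignment marker to test. The following is a minimal C++ sketch of that stack arithmetic; it is not V8 code, and the simulated stack, the zap constant value, and the helper names are invented for illustration only.

#include <cstdint>
#include <cstdio>
#include <vector>

namespace {

constexpr int kPointerSize = 4;                  // ia32 word size
constexpr uint32_t kSlotsZapValue = 0xbeefcafe;  // stand-in zap pattern, not V8's constant

// The "stack" grows by push_back; index 0 is the deepest (caller-owned) word.
using Stack = std::vector<uint32_t>;

// NEW prologue: a single esp adjustment, plus an optional debug zap loop.
void ReserveSpillSlots(Stack* stack, int slots, bool debug_code) {
  for (int i = 0; i < slots; ++i) {
    // Models: sub esp, slots * kPointerSize; optionally fill with kSlotsZapValue.
    stack->push_back(debug_code ? kSlotsZapValue : 0u);
  }
}

// NEW return path: drop the spill slots, then the parameters plus the one
// extra value (extra_value_count == 1 in the NEW EmitReturn). There is no
// conditional alignment-padding word to check or pop anymore.
void DropFrameAndArguments(Stack* stack, int spill_slots, int parameter_count) {
  const int extra_value_count = 1;
  int words_to_drop = spill_slots + parameter_count + extra_value_count;
  stack->resize(stack->size() - static_cast<size_t>(words_to_drop));
}

}  // namespace

int main() {
  Stack stack(3, 0u);                   // pretend the caller pushed 2 args + 1 extra value
  ReserveSpillSlots(&stack, 4, true);   // prologue reserves 4 zapped spill slots
  DropFrameAndArguments(&stack, 4, 2);  // return tears the frame back down
  std::printf("stack depth after return: %d bytes (expected 0)\n",
              static_cast<int>(stack.size()) * kPointerSize);
  return 0;
}

Compiling and running the sketch reports a remaining depth of 0 bytes, mirroring the invariant that the simplified prologue and return sequence in the NEW column balance exactly without any padding bookkeeping.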