OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 15 matching lines...) |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_ | 28 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_ |
29 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_ | 29 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_ |
30 | 30 |
31 #include "ia32/lithium-ia32.h" | 31 #include "ia32/lithium-ia32.h" |
32 | 32 |
33 #include "checks.h" | 33 #include "checks.h" |
34 #include "deoptimizer.h" | 34 #include "deoptimizer.h" |
35 #include "ia32/lithium-gap-resolver-ia32.h" | 35 #include "ia32/lithium-gap-resolver-ia32.h" |
| 36 #include "lithium-codegen.h" |
36 #include "safepoint-table.h" | 37 #include "safepoint-table.h" |
37 #include "scopes.h" | 38 #include "scopes.h" |
38 #include "v8utils.h" | 39 #include "v8utils.h" |
39 | 40 |
40 namespace v8 { | 41 namespace v8 { |
41 namespace internal { | 42 namespace internal { |
42 | 43 |
43 // Forward declarations. | 44 // Forward declarations. |
44 class LDeferredCode; | 45 class LDeferredCode; |
45 class LGapNode; | 46 class LGapNode; |
46 class SafepointGenerator; | 47 class SafepointGenerator; |
47 | 48 |
48 class LCodeGen V8_FINAL BASE_EMBEDDED { | 49 class LCodeGen: public LCodeGenBase { |
49 public: | 50 public: |
50 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) | 51 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
51 : zone_(info->zone()), | 52 : LCodeGenBase(chunk, assembler, info), |
52 chunk_(static_cast<LPlatformChunk*>(chunk)), | |
53 masm_(assembler), | |
54 info_(info), | |
55 current_block_(-1), | |
56 current_instruction_(-1), | |
57 instructions_(chunk->instructions()), | |
58 deoptimizations_(4, info->zone()), | 53 deoptimizations_(4, info->zone()), |
59 jump_table_(4, info->zone()), | 54 jump_table_(4, info->zone()), |
60 deoptimization_literals_(8, info->zone()), | 55 deoptimization_literals_(8, info->zone()), |
61 inlined_function_count_(0), | 56 inlined_function_count_(0), |
62 scope_(info->scope()), | 57 scope_(info->scope()), |
63 status_(UNUSED), | |
64 translations_(info->zone()), | 58 translations_(info->zone()), |
65 deferred_(8, info->zone()), | 59 deferred_(8, info->zone()), |
66 dynamic_frame_alignment_(false), | 60 dynamic_frame_alignment_(false), |
67 support_aligned_spilled_doubles_(false), | 61 support_aligned_spilled_doubles_(false), |
68 osr_pc_offset_(-1), | 62 osr_pc_offset_(-1), |
69 last_lazy_deopt_pc_(0), | |
70 frame_is_built_(false), | 63 frame_is_built_(false), |
71 x87_stack_(assembler), | 64 x87_stack_(assembler), |
72 safepoints_(info->zone()), | 65 safepoints_(info->zone()), |
73 resolver_(this), | 66 resolver_(this), |
74 expected_safepoint_kind_(Safepoint::kSimple), | 67 expected_safepoint_kind_(Safepoint::kSimple), |
75 old_position_(RelocInfo::kNoPosition) { | 68 old_position_(RelocInfo::kNoPosition) { |
76 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 69 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
77 } | 70 } |
78 | 71 |
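The constructor no longer initializes the bookkeeping fields it used to own (zone_, chunk_, masm_, info_, current_block_, current_instruction_, instructions_, status_, last_lazy_deopt_pc_); it now forwards its arguments to the new LCodeGenBase base class pulled in via lithium-codegen.h above. That base class is not part of this diff, so the following is only a hedged sketch of its shared portion, with member names inferred from the initializers deleted here rather than taken from the real lithium-codegen.h:

// Hypothetical sketch of the shared base class this change introduces.
// Names are inferred from the fields removed from LCodeGen below; the
// actual declaration in lithium-codegen.h may differ.
class LCodeGenBase BASE_EMBEDDED {
 public:
  LCodeGenBase(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : chunk_(static_cast<LPlatformChunk*>(chunk)),
        masm_(assembler),
        info_(info),
        zone_(info->zone()),
        status_(UNUSED),
        current_block_(-1),
        current_instruction_(-1),
        instructions_(chunk->instructions()),
        last_lazy_deopt_pc_(0) {}

  // Simple accessors shared by every platform code generator.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }
  Zone* zone() const { return zone_; }

 protected:
  enum Status { UNUSED, GENERATING, DONE, ABORTED };

  LPlatformChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;
  Zone* zone_;
  Status status_;
  int current_block_;
  int current_instruction_;
  const ZoneList<LInstruction*>* instructions_;
  int last_lazy_deopt_pc_;
};

Under that reading, each platform-specific LCodeGen (this ia32 one included) keeps only its own state, such as the X87Stack, deoptimization tables, and safepoint bookkeeping that remain below.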
79 // Simple accessors. | |
80 MacroAssembler* masm() const { return masm_; } | |
81 CompilationInfo* info() const { return info_; } | |
82 Isolate* isolate() const { return info_->isolate(); } | |
83 Factory* factory() const { return isolate()->factory(); } | |
84 Heap* heap() const { return isolate()->heap(); } | |
85 Zone* zone() const { return zone_; } | |
86 | |
87 int LookupDestination(int block_id) const { | 72 int LookupDestination(int block_id) const { |
88 return chunk()->LookupDestination(block_id); | 73 return chunk()->LookupDestination(block_id); |
89 } | 74 } |
90 | 75 |
91 bool IsNextEmittedBlock(int block_id) const { | 76 bool IsNextEmittedBlock(int block_id) const { |
92 return LookupDestination(block_id) == GetNextEmittedBlock(); | 77 return LookupDestination(block_id) == GetNextEmittedBlock(); |
93 } | 78 } |
94 | 79 |
95 bool NeedsEagerFrame() const { | 80 bool NeedsEagerFrame() const { |
96 return GetStackSlotCount() > 0 || | 81 return GetStackSlotCount() > 0 || |
(...skipping 85 matching lines...) |
182 void WriteTranslation(LEnvironment* environment, Translation* translation); | 167 void WriteTranslation(LEnvironment* environment, Translation* translation); |
183 | 168 |
184 void EnsureRelocSpaceForDeoptimization(); | 169 void EnsureRelocSpaceForDeoptimization(); |
185 | 170 |
186 // Declare methods that deal with the individual node types. | 171 // Declare methods that deal with the individual node types. |
187 #define DECLARE_DO(type) void Do##type(L##type* node); | 172 #define DECLARE_DO(type) void Do##type(L##type* node); |
188 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | 173 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) |
189 #undef DECLARE_DO | 174 #undef DECLARE_DO |
190 | 175 |
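The DECLARE_DO / LITHIUM_CONCRETE_INSTRUCTION_LIST pair above is the usual X-macro pattern: the list macro invokes DECLARE_DO once per concrete Lithium instruction, stamping out one Do<Type> handler declaration each time. A minimal, self-contained illustration of the expansion (the two instruction names are stand-ins, not the full list from lithium-ia32.h):

// Toy list macro standing in for LITHIUM_CONCRETE_INSTRUCTION_LIST.
#define EXAMPLE_INSTRUCTION_LIST(V) \
  V(Add)                            \
  V(Branch)

#define DECLARE_DO(type) void Do##type(L##type* node);
EXAMPLE_INSTRUCTION_LIST(DECLARE_DO)
// ...expands to:
//   void DoAdd(LAdd* node);
//   void DoBranch(LBranch* node);
#undef DECLARE_DO

Keeping the declarations behind one list macro means a new instruction only has to be added to the list once to get its DoXxx hook declared on every platform's code generator.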
191 private: | 176 private: |
192 enum Status { | |
193 UNUSED, | |
194 GENERATING, | |
195 DONE, | |
196 ABORTED | |
197 }; | |
198 | |
199 bool is_unused() const { return status_ == UNUSED; } | |
200 bool is_generating() const { return status_ == GENERATING; } | |
201 bool is_done() const { return status_ == DONE; } | |
202 bool is_aborted() const { return status_ == ABORTED; } | |
203 | |
204 StrictModeFlag strict_mode_flag() const { | 177 StrictModeFlag strict_mode_flag() const { |
205 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode; | 178 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode; |
206 } | 179 } |
207 | 180 |
208 LPlatformChunk* chunk() const { return chunk_; } | |
209 Scope* scope() const { return scope_; } | 181 Scope* scope() const { return scope_; } |
210 HGraph* graph() const { return chunk()->graph(); } | |
211 | 182 |
212 XMMRegister double_scratch0() const { return xmm0; } | 183 XMMRegister double_scratch0() const { return xmm0; } |
213 | 184 |
214 int GetNextEmittedBlock() const; | |
215 | |
216 void EmitClassOfTest(Label* if_true, | 185 void EmitClassOfTest(Label* if_true, |
217 Label* if_false, | 186 Label* if_false, |
218 Handle<String> class_name, | 187 Handle<String> class_name, |
219 Register input, | 188 Register input, |
220 Register temporary, | 189 Register temporary, |
221 Register temporary2); | 190 Register temporary2); |
222 | 191 |
223 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | 192 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } |
224 | 193 |
225 void Abort(BailoutReason reason); | 194 void Abort(BailoutReason reason); |
226 void FPRINTF_CHECKING Comment(const char* format, ...); | |
227 | 195 |
228 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } | 196 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } |
229 | 197 |
230 // Code generation passes. Returns true if code generation should | 198 // Code generation passes. Returns true if code generation should |
231 // continue. | 199 // continue. |
| 200 void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE; |
| 201 void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE; |
232 bool GeneratePrologue(); | 202 bool GeneratePrologue(); |
233 bool GenerateBody(); | |
234 bool GenerateDeferredCode(); | 203 bool GenerateDeferredCode(); |
235 bool GenerateJumpTable(); | 204 bool GenerateJumpTable(); |
236 bool GenerateSafepointTable(); | 205 bool GenerateSafepointTable(); |
237 | 206 |
238 // Generates the custom OSR entrypoint and sets the osr_pc_offset. | 207 // Generates the custom OSR entrypoint and sets the osr_pc_offset. |
239 void GenerateOsrPrologue(); | 208 void GenerateOsrPrologue(); |
240 | 209 |
241 enum SafepointMode { | 210 enum SafepointMode { |
242 RECORD_SIMPLE_SAFEPOINT, | 211 RECORD_SIMPLE_SAFEPOINT, |
243 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS | 212 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS |
(...skipping 86 matching lines...) |
330 Safepoint::Kind kind, | 299 Safepoint::Kind kind, |
331 int arguments, | 300 int arguments, |
332 Safepoint::DeoptMode mode); | 301 Safepoint::DeoptMode mode); |
333 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | 302 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); |
334 void RecordSafepoint(Safepoint::DeoptMode mode); | 303 void RecordSafepoint(Safepoint::DeoptMode mode); |
335 void RecordSafepointWithRegisters(LPointerMap* pointers, | 304 void RecordSafepointWithRegisters(LPointerMap* pointers, |
336 int arguments, | 305 int arguments, |
337 Safepoint::DeoptMode mode); | 306 Safepoint::DeoptMode mode); |
338 void RecordPosition(int position); | 307 void RecordPosition(int position); |
339 | 308 |
340 void RecordAndUpdatePosition(int position); | 309 void RecordAndUpdatePosition(int position) V8_OVERRIDE; |
341 | 310 |
342 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | 311 static Condition TokenToCondition(Token::Value op, bool is_unsigned); |
343 void EmitGoto(int block); | 312 void EmitGoto(int block); |
344 template<class InstrType> | 313 template<class InstrType> |
345 void EmitBranch(InstrType instr, Condition cc); | 314 void EmitBranch(InstrType instr, Condition cc); |
346 template<class InstrType> | 315 template<class InstrType> |
347 void EmitFalseBranch(InstrType instr, Condition cc); | 316 void EmitFalseBranch(InstrType instr, Condition cc); |
348 void EmitNumberUntagD( | 317 void EmitNumberUntagD( |
349 Register input, | 318 Register input, |
350 Register temp, | 319 Register temp, |
(...skipping 41 matching lines...) |
392 void EmitIsConstructCall(Register temp); | 361 void EmitIsConstructCall(Register temp); |
393 | 362 |
394 // Emits optimized code to deep-copy the contents of statically known | 363 // Emits optimized code to deep-copy the contents of statically known |
395 // object graphs (e.g. object literal boilerplate). | 364 // object graphs (e.g. object literal boilerplate). |
396 void EmitDeepCopy(Handle<JSObject> object, | 365 void EmitDeepCopy(Handle<JSObject> object, |
397 Register result, | 366 Register result, |
398 Register source, | 367 Register source, |
399 int* offset, | 368 int* offset, |
400 AllocationSiteMode mode); | 369 AllocationSiteMode mode); |
401 | 370 |
402 void EnsureSpaceForLazyDeopt(); | 371 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE; |
403 void DoLoadKeyedExternalArray(LLoadKeyed* instr); | 372 void DoLoadKeyedExternalArray(LLoadKeyed* instr); |
404 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); | 373 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); |
405 void DoLoadKeyedFixedArray(LLoadKeyed* instr); | 374 void DoLoadKeyedFixedArray(LLoadKeyed* instr); |
406 void DoStoreKeyedExternalArray(LStoreKeyed* instr); | 375 void DoStoreKeyedExternalArray(LStoreKeyed* instr); |
407 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); | 376 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); |
408 void DoStoreKeyedFixedArray(LStoreKeyed* instr); | 377 void DoStoreKeyedFixedArray(LStoreKeyed* instr); |
409 | 378 |
410 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment); | 379 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment); |
411 | 380 |
412 // Emits code for pushing either a tagged constant, a (non-double) | 381 // Emits code for pushing either a tagged constant, a (non-double) |
413 // register, or a stack slot operand. | 382 // register, or a stack slot operand. |
414 void EmitPushTaggedOperand(LOperand* operand); | 383 void EmitPushTaggedOperand(LOperand* operand); |
415 | 384 |
416 void X87Fld(Operand src, X87OperandType opts); | 385 void X87Fld(Operand src, X87OperandType opts); |
417 | 386 |
418 void EmitFlushX87ForDeopt(); | 387 void EmitFlushX87ForDeopt(); |
419 void FlushX87StackIfNecessary(LInstruction* instr) { | 388 void FlushX87StackIfNecessary(LInstruction* instr) { |
420 x87_stack_.FlushIfNecessary(instr, this); | 389 x87_stack_.FlushIfNecessary(instr, this); |
421 } | 390 } |
422 friend class LGapResolver; | 391 friend class LGapResolver; |
423 | 392 |
424 #ifdef _MSC_VER | 393 #ifdef _MSC_VER |
425 // On windows, you may not access the stack more than one page below | 394 // On windows, you may not access the stack more than one page below |
426 // the most recently mapped page. To make the allocated area randomly | 395 // the most recently mapped page. To make the allocated area randomly |
427 // accessible, we write an arbitrary value to each page in range | 396 // accessible, we write an arbitrary value to each page in range |
428 // esp + offset - page_size .. esp in turn. | 397 // esp + offset - page_size .. esp in turn. |
429 void MakeSureStackPagesMapped(int offset); | 398 void MakeSureStackPagesMapped(int offset); |
430 #endif | 399 #endif |
431 | 400 |
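The comment in the _MSC_VER block describes the standard Windows stack-probe idiom: the guard page only extends the stack one page at a time, so a large downward adjustment of esp must touch every page in between before the generated code addresses the area at arbitrary offsets. A hedged sketch of the loop MakeSureStackPagesMapped presumably emits (the real body lives in lithium-codegen-ia32.cc and is not shown in this diff; the page-size constant and the choice of eax as the store value are assumptions):

// Sketch only: touch one word per 4 KB page of the soon-to-be-used area so
// Windows maps each page before it is accessed directly. Details such as the
// register used and the exact loop bounds may differ from the real code.
void LCodeGen::MakeSureStackPagesMapped(int offset) {
  const int kPageSize = 4 * 1024;
  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
    masm()->mov(Operand(esp, offset), eax);
  }
}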
432 Zone* zone_; | |
433 LPlatformChunk* const chunk_; | |
434 MacroAssembler* const masm_; | |
435 CompilationInfo* const info_; | |
436 | |
437 int current_block_; | |
438 int current_instruction_; | |
439 const ZoneList<LInstruction*>* instructions_; | |
440 ZoneList<LEnvironment*> deoptimizations_; | 401 ZoneList<LEnvironment*> deoptimizations_; |
441 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; | 402 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; |
442 ZoneList<Handle<Object> > deoptimization_literals_; | 403 ZoneList<Handle<Object> > deoptimization_literals_; |
443 int inlined_function_count_; | 404 int inlined_function_count_; |
444 Scope* const scope_; | 405 Scope* const scope_; |
445 Status status_; | |
446 TranslationBuffer translations_; | 406 TranslationBuffer translations_; |
447 ZoneList<LDeferredCode*> deferred_; | 407 ZoneList<LDeferredCode*> deferred_; |
448 bool dynamic_frame_alignment_; | 408 bool dynamic_frame_alignment_; |
449 bool support_aligned_spilled_doubles_; | 409 bool support_aligned_spilled_doubles_; |
450 int osr_pc_offset_; | 410 int osr_pc_offset_; |
451 int last_lazy_deopt_pc_; | |
452 bool frame_is_built_; | 411 bool frame_is_built_; |
453 | 412 |
454 class X87Stack { | 413 class X87Stack { |
455 public: | 414 public: |
456 explicit X87Stack(MacroAssembler* masm) | 415 explicit X87Stack(MacroAssembler* masm) |
457 : stack_depth_(0), is_mutable_(true), masm_(masm) { } | 416 : stack_depth_(0), is_mutable_(true), masm_(masm) { } |
458 explicit X87Stack(const X87Stack& other) | 417 explicit X87Stack(const X87Stack& other) |
459 : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(masm()) { | 418 : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(masm()) { |
460 for (int i = 0; i < stack_depth_; i++) { | 419 for (int i = 0; i < stack_depth_; i++) { |
461 stack_[i] = other.stack_[i]; | 420 stack_[i] = other.stack_[i]; |
(...skipping 107 matching lines...) |
569 Label exit_; | 528 Label exit_; |
570 Label* external_exit_; | 529 Label* external_exit_; |
571 Label done_; | 530 Label done_; |
572 int instruction_index_; | 531 int instruction_index_; |
573 LCodeGen::X87Stack x87_stack_; | 532 LCodeGen::X87Stack x87_stack_; |
574 }; | 533 }; |
575 | 534 |
576 } } // namespace v8::internal | 535 } } // namespace v8::internal |
577 | 536 |
578 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_ | 537 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_ |