Chromium Code Reviews

Side by Side Diff: src/x87/lithium-codegen-x87.h

Issue 293743005: Introduce x87 port (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: Created 6 years, 7 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_ 5 #ifndef V8_X87_LITHIUM_CODEGEN_X87_H_
6 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_ 6 #define V8_X87_LITHIUM_CODEGEN_X87_H_
7 7
8 #include "ia32/lithium-ia32.h" 8 #include "x87/lithium-x87.h"
9 9
10 #include "checks.h" 10 #include "checks.h"
11 #include "deoptimizer.h" 11 #include "deoptimizer.h"
12 #include "ia32/lithium-gap-resolver-ia32.h" 12 #include "x87/lithium-gap-resolver-x87.h"
13 #include "lithium-codegen.h" 13 #include "lithium-codegen.h"
14 #include "safepoint-table.h" 14 #include "safepoint-table.h"
15 #include "scopes.h" 15 #include "scopes.h"
16 #include "utils.h" 16 #include "utils.h"
17 17
18 namespace v8 { 18 namespace v8 {
19 namespace internal { 19 namespace internal {
20 20
21 // Forward declarations. 21 // Forward declarations.
22 class LDeferredCode; 22 class LDeferredCode;
23 class LGapNode; 23 class LGapNode;
24 class SafepointGenerator; 24 class SafepointGenerator;
25 25
26 class LCodeGen: public LCodeGenBase { 26 class LCodeGen: public LCodeGenBase {
27 public: 27 public:
28 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) 28 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
29 : LCodeGenBase(chunk, assembler, info), 29 : LCodeGenBase(chunk, assembler, info),
30 deoptimizations_(4, info->zone()), 30 deoptimizations_(4, info->zone()),
31 jump_table_(4, info->zone()), 31 jump_table_(4, info->zone()),
32 deoptimization_literals_(8, info->zone()), 32 deoptimization_literals_(8, info->zone()),
33 inlined_function_count_(0), 33 inlined_function_count_(0),
34 scope_(info->scope()), 34 scope_(info->scope()),
35 translations_(info->zone()), 35 translations_(info->zone()),
36 deferred_(8, info->zone()), 36 deferred_(8, info->zone()),
37 dynamic_frame_alignment_(false), 37 dynamic_frame_alignment_(false),
38 support_aligned_spilled_doubles_(false), 38 support_aligned_spilled_doubles_(false),
39 osr_pc_offset_(-1), 39 osr_pc_offset_(-1),
40 frame_is_built_(false), 40 frame_is_built_(false),
41 x87_stack_(assembler),
41 safepoints_(info->zone()), 42 safepoints_(info->zone()),
42 resolver_(this), 43 resolver_(this),
43 expected_safepoint_kind_(Safepoint::kSimple) { 44 expected_safepoint_kind_(Safepoint::kSimple) {
44 PopulateDeoptimizationLiteralsWithInlinedFunctions(); 45 PopulateDeoptimizationLiteralsWithInlinedFunctions();
45 } 46 }
46 47
47 int LookupDestination(int block_id) const { 48 int LookupDestination(int block_id) const {
48 return chunk()->LookupDestination(block_id); 49 return chunk()->LookupDestination(block_id);
49 } 50 }
50 51
51 bool IsNextEmittedBlock(int block_id) const { 52 bool IsNextEmittedBlock(int block_id) const {
52 return LookupDestination(block_id) == GetNextEmittedBlock(); 53 return LookupDestination(block_id) == GetNextEmittedBlock();
53 } 54 }
54 55
55 bool NeedsEagerFrame() const { 56 bool NeedsEagerFrame() const {
56 return GetStackSlotCount() > 0 || 57 return GetStackSlotCount() > 0 ||
57 info()->is_non_deferred_calling() || 58 info()->is_non_deferred_calling() ||
58 !info()->IsStub() || 59 !info()->IsStub() ||
59 info()->requires_frame(); 60 info()->requires_frame();
60 } 61 }
61 bool NeedsDeferredFrame() const { 62 bool NeedsDeferredFrame() const {
62 return !NeedsEagerFrame() && info()->is_deferred_calling(); 63 return !NeedsEagerFrame() && info()->is_deferred_calling();
63 } 64 }
64 65
65 // Support for converting LOperands to assembler types. 66 // Support for converting LOperands to assembler types.
66 Operand ToOperand(LOperand* op) const; 67 Operand ToOperand(LOperand* op) const;
67 Register ToRegister(LOperand* op) const; 68 Register ToRegister(LOperand* op) const;
68 XMMRegister ToDoubleRegister(LOperand* op) const; 69 X87Register ToX87Register(LOperand* op) const;
69 70
70 bool IsInteger32(LConstantOperand* op) const; 71 bool IsInteger32(LConstantOperand* op) const;
71 bool IsSmi(LConstantOperand* op) const; 72 bool IsSmi(LConstantOperand* op) const;
72 Immediate ToImmediate(LOperand* op, const Representation& r) const { 73 Immediate ToImmediate(LOperand* op, const Representation& r) const {
73 return Immediate(ToRepresentation(LConstantOperand::cast(op), r)); 74 return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
74 } 75 }
75 double ToDouble(LConstantOperand* op) const; 76 double ToDouble(LConstantOperand* op) const;
76 77
78 // Support for non-sse2 (x87) floating point stack handling.
79 // These functions maintain the mapping of physical stack registers to our
80 // virtual registers between instructions.
81 enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };
82
83 void X87Mov(X87Register reg, Operand src,
84 X87OperandType operand = kX87DoubleOperand);
85 void X87Mov(Operand src, X87Register reg,
86 X87OperandType operand = kX87DoubleOperand);
87
88 void X87PrepareBinaryOp(
89 X87Register left, X87Register right, X87Register result);
90
91 void X87LoadForUsage(X87Register reg);
92 void X87LoadForUsage(X87Register reg1, X87Register reg2);
93 void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
94 void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }
95
96 void X87Fxch(X87Register reg, int other_slot = 0) {
97 x87_stack_.Fxch(reg, other_slot);
98 }
99 void X87Free(X87Register reg) {
100 x87_stack_.Free(reg);
101 }
102
103
104 bool X87StackEmpty() {
105 return x87_stack_.depth() == 0;
106 }
107
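A rough illustration of how an instruction handler is expected to combine the helpers above (the handler name, the LBinaryMathOp type, and the InputAt()/result() accessors are assumed for the sketch and are not declared in this header):

    void LCodeGen::DoIllustrativeBinaryMathOp(LBinaryMathOp* instr) {
      X87Register left = ToX87Register(instr->InputAt(0));
      X87Register right = ToX87Register(instr->InputAt(1));
      X87Register result = ToX87Register(instr->result());
      // Presumably rearranges the physical FPU stack so a two-operand x87
      // instruction sees its inputs where it expects them, and updates the
      // virtual mapping so 'result' owns the slot the result lands in.
      X87PrepareBinaryOp(left, right, result);
      // ... emit the actual x87 arithmetic through the macro assembler ...
    }
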
77 Handle<Object> ToHandle(LConstantOperand* op) const; 108 Handle<Object> ToHandle(LConstantOperand* op) const;
78 109
79 // The operand denoting the second word (the one with a higher address) of 110 // The operand denoting the second word (the one with a higher address) of
80 // a double stack slot. 111 // a double stack slot.
81 Operand HighOperand(LOperand* op); 112 Operand HighOperand(LOperand* op);
82 113
83 // Try to generate code for the entire chunk, but it may fail if the 114 // Try to generate code for the entire chunk, but it may fail if the
84 // chunk contains constructs we cannot handle. Returns true if the 115 // chunk contains constructs we cannot handle. Returns true if the
85 // code generation attempt succeeded. 116 // code generation attempt succeeded.
86 bool GenerateCode(); 117 bool GenerateCode();
87 118
88 // Finish the code by setting stack height, safepoint, and bailout 119 // Finish the code by setting stack height, safepoint, and bailout
89 // information on it. 120 // information on it.
90 void FinishCode(Handle<Code> code); 121 void FinishCode(Handle<Code> code);
91 122
92 // Deferred code support. 123 // Deferred code support.
93 void DoDeferredNumberTagD(LNumberTagD* instr); 124 void DoDeferredNumberTagD(LNumberTagD* instr);
94 125
95 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; 126 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
96 void DoDeferredNumberTagIU(LInstruction* instr, 127 void DoDeferredNumberTagIU(LInstruction* instr,
97 LOperand* value, 128 LOperand* value,
98 LOperand* temp1, 129 LOperand* temp,
99 LOperand* temp2,
100 IntegerSignedness signedness); 130 IntegerSignedness signedness);
101 131
102 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); 132 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
103 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); 133 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
104 void DoDeferredStackCheck(LStackCheck* instr); 134 void DoDeferredStackCheck(LStackCheck* instr);
105 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); 135 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
106 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); 136 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
107 void DoDeferredAllocate(LAllocate* instr); 137 void DoDeferredAllocate(LAllocate* instr);
108 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 138 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
109 Label* map_check); 139 Label* map_check);
(...skipping 14 matching lines...)
124 // Declare methods that deal with the individual node types. 154 // Declare methods that deal with the individual node types.
125 #define DECLARE_DO(type) void Do##type(L##type* node); 155 #define DECLARE_DO(type) void Do##type(L##type* node);
126 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) 156 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
127 #undef DECLARE_DO 157 #undef DECLARE_DO
128 158
129 private: 159 private:
130 StrictMode strict_mode() const { return info()->strict_mode(); } 160 StrictMode strict_mode() const { return info()->strict_mode(); }
131 161
132 Scope* scope() const { return scope_; } 162 Scope* scope() const { return scope_; }
133 163
134 XMMRegister double_scratch0() const { return xmm0; }
135
136 void EmitClassOfTest(Label* if_true, 164 void EmitClassOfTest(Label* if_true,
137 Label* if_false, 165 Label* if_false,
138 Handle<String> class_name, 166 Handle<String> class_name,
139 Register input, 167 Register input,
140 Register temporary, 168 Register temporary,
141 Register temporary2); 169 Register temporary2);
142 170
143 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } 171 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
144 172
145 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } 173 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
146 174
147 void SaveCallerDoubles();
148 void RestoreCallerDoubles();
149
150 // Code generation passes. Returns true if code generation should 175 // Code generation passes. Returns true if code generation should
151 // continue. 176 // continue.
152 void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE; 177 void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
153 void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE; 178 void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
154 bool GeneratePrologue(); 179 bool GeneratePrologue();
155 bool GenerateDeferredCode(); 180 bool GenerateDeferredCode();
156 bool GenerateJumpTable(); 181 bool GenerateJumpTable();
157 bool GenerateSafepointTable(); 182 bool GenerateSafepointTable();
158 183
159 // Generates the custom OSR entrypoint and sets the osr_pc_offset. 184 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
160 void GenerateOsrPrologue(); 185 void GenerateOsrPrologue();
161 186
162 enum SafepointMode { 187 enum SafepointMode {
163 RECORD_SIMPLE_SAFEPOINT, 188 RECORD_SIMPLE_SAFEPOINT,
164 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS 189 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
165 }; 190 };
166 191
167 void CallCode(Handle<Code> code, 192 void CallCode(Handle<Code> code,
168 RelocInfo::Mode mode, 193 RelocInfo::Mode mode,
169 LInstruction* instr); 194 LInstruction* instr);
170 195
171 void CallCodeGeneric(Handle<Code> code, 196 void CallCodeGeneric(Handle<Code> code,
172 RelocInfo::Mode mode, 197 RelocInfo::Mode mode,
173 LInstruction* instr, 198 LInstruction* instr,
174 SafepointMode safepoint_mode); 199 SafepointMode safepoint_mode);
175 200
176 void CallRuntime(const Runtime::Function* fun, 201 void CallRuntime(const Runtime::Function* fun,
177 int argc, 202 int argc,
178 LInstruction* instr, 203 LInstruction* instr);
179 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
180 204
181 void CallRuntime(Runtime::FunctionId id, 205 void CallRuntime(Runtime::FunctionId id,
182 int argc, 206 int argc,
183 LInstruction* instr) { 207 LInstruction* instr) {
184 const Runtime::Function* function = Runtime::FunctionForId(id); 208 const Runtime::Function* function = Runtime::FunctionForId(id);
185 CallRuntime(function, argc, instr); 209 CallRuntime(function, argc, instr);
186 } 210 }
187 211
188 void CallRuntimeFromDeferred(Runtime::FunctionId id, 212 void CallRuntimeFromDeferred(Runtime::FunctionId id,
189 int argc, 213 int argc,
(...skipping 35 matching lines...)
225 bool is_tagged, 249 bool is_tagged,
226 bool is_uint32, 250 bool is_uint32,
227 int* object_index_pointer, 251 int* object_index_pointer,
228 int* dematerialized_index_pointer); 252 int* dematerialized_index_pointer);
229 void PopulateDeoptimizationData(Handle<Code> code); 253 void PopulateDeoptimizationData(Handle<Code> code);
230 int DefineDeoptimizationLiteral(Handle<Object> literal); 254 int DefineDeoptimizationLiteral(Handle<Object> literal);
231 255
232 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); 256 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
233 257
234 Register ToRegister(int index) const; 258 Register ToRegister(int index) const;
235 XMMRegister ToDoubleRegister(int index) const; 259 X87Register ToX87Register(int index) const;
236 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const; 260 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
237 int32_t ToInteger32(LConstantOperand* op) const; 261 int32_t ToInteger32(LConstantOperand* op) const;
238 ExternalReference ToExternalReference(LConstantOperand* op) const; 262 ExternalReference ToExternalReference(LConstantOperand* op) const;
239 263
240 Operand BuildFastArrayOperand(LOperand* elements_pointer, 264 Operand BuildFastArrayOperand(LOperand* elements_pointer,
241 LOperand* key, 265 LOperand* key,
242 Representation key_representation, 266 Representation key_representation,
243 ElementsKind elements_kind, 267 ElementsKind elements_kind,
244 uint32_t offset, 268 uint32_t offset,
245 uint32_t additional_index = 0); 269 uint32_t additional_index = 0);
(...skipping 18 matching lines...)
264 void RecordAndWritePosition(int position) V8_OVERRIDE; 288 void RecordAndWritePosition(int position) V8_OVERRIDE;
265 289
266 static Condition TokenToCondition(Token::Value op, bool is_unsigned); 290 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
267 void EmitGoto(int block); 291 void EmitGoto(int block);
268 292
269 // EmitBranch expects to be the last instruction of a block. 293 // EmitBranch expects to be the last instruction of a block.
270 template<class InstrType> 294 template<class InstrType>
271 void EmitBranch(InstrType instr, Condition cc); 295 void EmitBranch(InstrType instr, Condition cc);
272 template<class InstrType> 296 template<class InstrType>
273 void EmitFalseBranch(InstrType instr, Condition cc); 297 void EmitFalseBranch(InstrType instr, Condition cc);
274 void EmitNumberUntagD( 298 void EmitNumberUntagDNoSSE2(
275 Register input, 299 Register input,
276 Register temp, 300 Register temp,
277 XMMRegister result, 301 X87Register res_reg,
278 bool allow_undefined_as_nan, 302 bool allow_undefined_as_nan,
279 bool deoptimize_on_minus_zero, 303 bool deoptimize_on_minus_zero,
280 LEnvironment* env, 304 LEnvironment* env,
281 NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED); 305 NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
282 306
283 // Emits optimized code for typeof x == "y". Modifies input register. 307 // Emits optimized code for typeof x == "y". Modifies input register.
284 // Returns the condition on which a final split to 308 // Returns the condition on which a final split to
285 // true and false label should be made, to optimize fallthrough. 309 // true and false label should be made, to optimize fallthrough.
286 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input); 310 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
287 311
(...skipping 32 matching lines...)
320 void DoStoreKeyedExternalArray(LStoreKeyed* instr); 344 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
321 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); 345 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
322 void DoStoreKeyedFixedArray(LStoreKeyed* instr); 346 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
323 347
324 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment); 348 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);
325 349
326 // Emits code for pushing either a tagged constant, a (non-double) 350 // Emits code for pushing either a tagged constant, a (non-double)
327 // register, or a stack slot operand. 351 // register, or a stack slot operand.
328 void EmitPushTaggedOperand(LOperand* operand); 352 void EmitPushTaggedOperand(LOperand* operand);
329 353
354 void X87Fld(Operand src, X87OperandType opts);
355
356 void EmitFlushX87ForDeopt();
357 void FlushX87StackIfNecessary(LInstruction* instr) {
358 x87_stack_.FlushIfNecessary(instr, this);
359 }
330 friend class LGapResolver; 360 friend class LGapResolver;
331 361
332 #ifdef _MSC_VER 362 #ifdef _MSC_VER
333 // On windows, you may not access the stack more than one page below 363 // On windows, you may not access the stack more than one page below
334 // the most recently mapped page. To make the allocated area randomly 364 // the most recently mapped page. To make the allocated area randomly
335 // accessible, we write an arbitrary value to each page in range 365 // accessible, we write an arbitrary value to each page in range
336 // esp + offset - page_size .. esp in turn. 366 // esp + offset - page_size .. esp in turn.
337 void MakeSureStackPagesMapped(int offset); 367 void MakeSureStackPagesMapped(int offset);
338 #endif 368 #endif
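The comment above describes a standard stack-probing loop; a sketch of what MakeSureStackPagesMapped(offset) might emit (assuming a 4 KB page size and the `__` shorthand for masm()-> used in the .cc files; the real body is in lithium-codegen-x87.cc):

    #ifdef _MSC_VER
    void LCodeGen::MakeSureStackPagesMapped(int offset) {
      const int kPageSize = 4 * KB;
      // Touch one word per page between the lowest newly used slot and esp
      // so Windows maps each page before it is accessed out of order.
      for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
    }
    #endif
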
339 369
340 ZoneList<LEnvironment*> deoptimizations_; 370 ZoneList<LEnvironment*> deoptimizations_;
341 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; 371 ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
342 ZoneList<Handle<Object> > deoptimization_literals_; 372 ZoneList<Handle<Object> > deoptimization_literals_;
343 int inlined_function_count_; 373 int inlined_function_count_;
344 Scope* const scope_; 374 Scope* const scope_;
345 TranslationBuffer translations_; 375 TranslationBuffer translations_;
346 ZoneList<LDeferredCode*> deferred_; 376 ZoneList<LDeferredCode*> deferred_;
347 bool dynamic_frame_alignment_; 377 bool dynamic_frame_alignment_;
348 bool support_aligned_spilled_doubles_; 378 bool support_aligned_spilled_doubles_;
349 int osr_pc_offset_; 379 int osr_pc_offset_;
350 bool frame_is_built_; 380 bool frame_is_built_;
351 381
382 class X87Stack {
383 public:
384 explicit X87Stack(MacroAssembler* masm)
385 : stack_depth_(0), is_mutable_(true), masm_(masm) { }
386 explicit X87Stack(const X87Stack& other)
387 : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(other.masm_) {
388 for (int i = 0; i < stack_depth_; i++) {
389 stack_[i] = other.stack_[i];
390 }
391 }
392 bool operator==(const X87Stack& other) const {
393 if (stack_depth_ != other.stack_depth_) return false;
394 for (int i = 0; i < stack_depth_; i++) {
395 if (!stack_[i].is(other.stack_[i])) return false;
396 }
397 return true;
398 }
399 bool Contains(X87Register reg);
400 void Fxch(X87Register reg, int other_slot = 0);
401 void Free(X87Register reg);
402 void PrepareToWrite(X87Register reg);
403 void CommitWrite(X87Register reg);
404 void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
405 void LeavingBlock(int current_block_id, LGoto* goto_instr);
406 int depth() const { return stack_depth_; }
407 void pop() {
408 ASSERT(is_mutable_);
409 stack_depth_--;
410 }
411 void push(X87Register reg) {
412 ASSERT(is_mutable_);
413 ASSERT(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
414 stack_[stack_depth_] = reg;
415 stack_depth_++;
416 }
417
418 MacroAssembler* masm() const { return masm_; }
419 Isolate* isolate() const { return masm_->isolate(); }
420
421 private:
422 int ArrayIndex(X87Register reg);
423 int st2idx(int pos);
424
425 X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
426 int stack_depth_;
427 bool is_mutable_;
428 MacroAssembler* masm_;
429 };
430 X87Stack x87_stack_;
431
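As a rough illustration of the tracker's bookkeeping (a hypothetical LCodeGen member is used because X87Stack is private; push() and pop() only update the virtual mapping and emit no FPU instructions):

    void LCodeGen::IllustrateX87StackTracking(X87Register reg) {
      x87_stack_.push(reg);              // 'reg' becomes the top of the virtual stack
      X87Stack snapshot(x87_stack_);     // immutable copy; its push()/pop() would ASSERT
      ASSERT(snapshot == x87_stack_);    // same depth, same register in every slot
      x87_stack_.pop();
      ASSERT(!(snapshot == x87_stack_)); // the snapshot is unaffected by later pops
    }
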
352 // Builder that keeps track of safepoints in the code. The table 432 // Builder that keeps track of safepoints in the code. The table
353 // itself is emitted at the end of the generated code. 433 // itself is emitted at the end of the generated code.
354 SafepointTableBuilder safepoints_; 434 SafepointTableBuilder safepoints_;
355 435
356 // Compiler from a set of parallel moves to a sequential list of moves. 436 // Compiler from a set of parallel moves to a sequential list of moves.
357 LGapResolver resolver_; 437 LGapResolver resolver_;
358 438
359 Safepoint::Kind expected_safepoint_kind_; 439 Safepoint::Kind expected_safepoint_kind_;
360 440
361 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED { 441 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
(...skipping 18 matching lines...)
380 460
381 friend class LDeferredCode; 461 friend class LDeferredCode;
382 friend class LEnvironment; 462 friend class LEnvironment;
383 friend class SafepointGenerator; 463 friend class SafepointGenerator;
384 DISALLOW_COPY_AND_ASSIGN(LCodeGen); 464 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
385 }; 465 };
386 466
387 467
388 class LDeferredCode : public ZoneObject { 468 class LDeferredCode : public ZoneObject {
389 public: 469 public:
390 explicit LDeferredCode(LCodeGen* codegen) 470 explicit LDeferredCode(LCodeGen* codegen, const LCodeGen::X87Stack& x87_stack)
391 : codegen_(codegen), 471 : codegen_(codegen),
392 external_exit_(NULL), 472 external_exit_(NULL),
393 instruction_index_(codegen->current_instruction_) { 473 instruction_index_(codegen->current_instruction_),
474 x87_stack_(x87_stack) {
394 codegen->AddDeferredCode(this); 475 codegen->AddDeferredCode(this);
395 } 476 }
396 477
397 virtual ~LDeferredCode() {} 478 virtual ~LDeferredCode() {}
398 virtual void Generate() = 0; 479 virtual void Generate() = 0;
399 virtual LInstruction* instr() = 0; 480 virtual LInstruction* instr() = 0;
400 481
401 void SetExit(Label* exit) { external_exit_ = exit; } 482 void SetExit(Label* exit) { external_exit_ = exit; }
402 Label* entry() { return &entry_; } 483 Label* entry() { return &entry_; }
403 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; } 484 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
404 Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); } 485 Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
405 int instruction_index() const { return instruction_index_; } 486 int instruction_index() const { return instruction_index_; }
487 const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }
406 488
407 protected: 489 protected:
408 LCodeGen* codegen() const { return codegen_; } 490 LCodeGen* codegen() const { return codegen_; }
409 MacroAssembler* masm() const { return codegen_->masm(); } 491 MacroAssembler* masm() const { return codegen_->masm(); }
410 492
411 private: 493 private:
412 LCodeGen* codegen_; 494 LCodeGen* codegen_;
413 Label entry_; 495 Label entry_;
414 Label exit_; 496 Label exit_;
415 Label* external_exit_; 497 Label* external_exit_;
416 Label done_; 498 Label done_;
417 int instruction_index_; 499 int instruction_index_;
500 LCodeGen::X87Stack x87_stack_;
418 }; 501 };
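Deferred-code subclasses in lithium-codegen-x87.cc would follow roughly this shape under the new constructor, capturing the x87 stack layout at the point where the main path branches to the deferred path (the concrete class below is a sketch, not a declaration from this patch):

    class DeferredNumberTagD V8_FINAL : public LDeferredCode {
     public:
      DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr,
                         const LCodeGen::X87Stack& x87_stack)
          : LDeferredCode(codegen, x87_stack), instr_(instr) { }
      virtual void Generate() V8_OVERRIDE {
        // Runs with the same virtual x87 layout the main path had at the jump.
        codegen()->DoDeferredNumberTagD(instr_);
      }
      virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
     private:
      LNumberTagD* instr_;
    };
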
419 502
420 } } // namespace v8::internal 503 } } // namespace v8::internal
421 504
422 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_ 505 #endif // V8_X87_LITHIUM_CODEGEN_X87_H_