Chromium Code Reviews

Unified Diff: src/x87/lithium-codegen-x87.h

Issue 293743005: Introduce x87 port (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: rebase Created 6 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

-#ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
-#define V8_IA32_LITHIUM_CODEGEN_IA32_H_
+#ifndef V8_X87_LITHIUM_CODEGEN_X87_H_
+#define V8_X87_LITHIUM_CODEGEN_X87_H_

-#include "ia32/lithium-ia32.h"
+#include "x87/lithium-x87.h"

 #include "checks.h"
 #include "deoptimizer.h"
-#include "ia32/lithium-gap-resolver-ia32.h"
+#include "x87/lithium-gap-resolver-x87.h"
 #include "lithium-codegen.h"
 #include "safepoint-table.h"
 #include "scopes.h"
 #include "utils.h"

 namespace v8 {
 namespace internal {

 // Forward declarations.
 class LDeferredCode;
 class LGapNode;
 class SafepointGenerator;

 class LCodeGen: public LCodeGenBase {
  public:
   LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
       : LCodeGenBase(chunk, assembler, info),
         deoptimizations_(4, info->zone()),
         jump_table_(4, info->zone()),
         deoptimization_literals_(8, info->zone()),
         inlined_function_count_(0),
         scope_(info->scope()),
         translations_(info->zone()),
         deferred_(8, info->zone()),
         dynamic_frame_alignment_(false),
         support_aligned_spilled_doubles_(false),
         osr_pc_offset_(-1),
         frame_is_built_(false),
+        x87_stack_(assembler),
         safepoints_(info->zone()),
         resolver_(this),
         expected_safepoint_kind_(Safepoint::kSimple) {
     PopulateDeoptimizationLiteralsWithInlinedFunctions();
   }

   int LookupDestination(int block_id) const {
     return chunk()->LookupDestination(block_id);
   }

   bool IsNextEmittedBlock(int block_id) const {
     return LookupDestination(block_id) == GetNextEmittedBlock();
   }

   bool NeedsEagerFrame() const {
     return GetStackSlotCount() > 0 ||
         info()->is_non_deferred_calling() ||
         !info()->IsStub() ||
         info()->requires_frame();
   }
   bool NeedsDeferredFrame() const {
     return !NeedsEagerFrame() && info()->is_deferred_calling();
   }

   // Support for converting LOperands to assembler types.
   Operand ToOperand(LOperand* op) const;
   Register ToRegister(LOperand* op) const;
-  XMMRegister ToDoubleRegister(LOperand* op) const;
+  X87Register ToX87Register(LOperand* op) const;

   bool IsInteger32(LConstantOperand* op) const;
   bool IsSmi(LConstantOperand* op) const;
   Immediate ToImmediate(LOperand* op, const Representation& r) const {
     return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
   }
   double ToDouble(LConstantOperand* op) const;

+  // Support for non-sse2 (x87) floating point stack handling.
+  // These functions maintain the mapping of physical stack registers to our
+  // virtual registers between instructions.
+  enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };
+
+  void X87Mov(X87Register reg, Operand src,
+              X87OperandType operand = kX87DoubleOperand);
+  void X87Mov(Operand src, X87Register reg,
+              X87OperandType operand = kX87DoubleOperand);
+
+  void X87PrepareBinaryOp(
+      X87Register left, X87Register right, X87Register result);
+
+  void X87LoadForUsage(X87Register reg);
+  void X87LoadForUsage(X87Register reg1, X87Register reg2);
+  void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
+  void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }
+
+  void X87Fxch(X87Register reg, int other_slot = 0) {
+    x87_stack_.Fxch(reg, other_slot);
+  }
+  void X87Free(X87Register reg) {
+    x87_stack_.Free(reg);
+  }
+
+
+  bool X87StackEmpty() {
+    return x87_stack_.depth() == 0;
+  }
+
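The x87 FPU targeted by this port has no flat set of double registers: its eight registers st(0)..st(7) form a hardware stack, and most instructions implicitly operate on the top of that stack. The helpers above therefore keep a software model of which virtual X87Register currently sits in which physical stack slot, so the code generator knows when to emit fxch/ffree as values are moved, used, and released. A minimal, self-contained sketch of that bookkeeping idea (hypothetical names, heavily simplified; the real X87Stack class declared further down in this header does the same tracking and additionally emits the corresponding instructions through the MacroAssembler):

#include <cassert>
#include <utility>

// Hypothetical, simplified model of the bookkeeping: which virtual register id
// currently lives in which physical x87 slot. Unlike the real X87Stack, this
// sketch only tracks the mapping and emits no instructions.
class VirtualX87Stack {
 public:
  void Push(int vreg) {           // a new value for vreg lands on top, st(0)
    assert(depth_ < 8);           // the hardware stack is eight slots deep
    slots_[depth_++] = vreg;
  }
  int SlotOf(int vreg) const {    // distance from the top, i.e. the i in st(i)
    for (int i = 0; i < depth_; ++i) {
      if (slots_[depth_ - 1 - i] == vreg) return i;
    }
    return -1;                    // not currently on the stack
  }
  void Fxch(int vreg) {           // model "fxch st(i)": swap slot i with st(0)
    int i = SlotOf(vreg);
    assert(i >= 0);
    std::swap(slots_[depth_ - 1], slots_[depth_ - 1 - i]);
  }
  void Free(int vreg) {           // bring the value to the top, then drop it
    Fxch(vreg);
    --depth_;
  }
  int depth() const { return depth_; }

 private:
  int slots_[8] = {0};
  int depth_ = 0;
};

Deferred code later captures a copy of this mapping (see the X87Stack copy constructor and the new LDeferredCode member below) so the out-of-line path can be generated against the floating-point stack layout that was live at its branch point.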
   Handle<Object> ToHandle(LConstantOperand* op) const;

   // The operand denoting the second word (the one with a higher address) of
   // a double stack slot.
   Operand HighOperand(LOperand* op);

   // Try to generate code for the entire chunk, but it may fail if the
   // chunk contains constructs we cannot handle. Returns true if the
   // code generation attempt succeeded.
   bool GenerateCode();

   // Finish the code by setting stack height, safepoint, and bailout
   // information on it.
   void FinishCode(Handle<Code> code);

   // Deferred code support.
   void DoDeferredNumberTagD(LNumberTagD* instr);

   enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
   void DoDeferredNumberTagIU(LInstruction* instr,
                              LOperand* value,
-                             LOperand* temp1,
-                             LOperand* temp2,
+                             LOperand* temp,
                              IntegerSignedness signedness);

   void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
   void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
   void DoDeferredStackCheck(LStackCheck* instr);
   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
   void DoDeferredAllocate(LAllocate* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
(... skipping 14 matching lines ...)
   // Declare methods that deal with the individual node types.
 #define DECLARE_DO(type) void Do##type(L##type* node);
   LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
 #undef DECLARE_DO

  private:
   StrictMode strict_mode() const { return info()->strict_mode(); }

   Scope* scope() const { return scope_; }

-  XMMRegister double_scratch0() const { return xmm0; }
-
   void EmitClassOfTest(Label* if_true,
                        Label* if_false,
                        Handle<String> class_name,
                        Register input,
                        Register temporary,
                        Register temporary2);

   int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

-  void SaveCallerDoubles();
-  void RestoreCallerDoubles();
-
   // Code generation passes. Returns true if code generation should
   // continue.
   void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
   void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
   bool GeneratePrologue();
   bool GenerateDeferredCode();
   bool GenerateJumpTable();
   bool GenerateSafepointTable();

   // Generates the custom OSR entrypoint and sets the osr_pc_offset.
   void GenerateOsrPrologue();

   enum SafepointMode {
     RECORD_SIMPLE_SAFEPOINT,
     RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
   };

   void CallCode(Handle<Code> code,
                 RelocInfo::Mode mode,
                 LInstruction* instr);

   void CallCodeGeneric(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr,
                        SafepointMode safepoint_mode);

   void CallRuntime(const Runtime::Function* fun,
                    int argc,
-                   LInstruction* instr,
-                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);
+                   LInstruction* instr);

   void CallRuntime(Runtime::FunctionId id,
                    int argc,
                    LInstruction* instr) {
     const Runtime::Function* function = Runtime::FunctionForId(id);
     CallRuntime(function, argc, instr);
   }

   void CallRuntimeFromDeferred(Runtime::FunctionId id,
                                int argc,
(... skipping 35 matching lines ...)
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
   void PopulateDeoptimizationData(Handle<Code> code);
   int DefineDeoptimizationLiteral(Handle<Object> literal);

   void PopulateDeoptimizationLiteralsWithInlinedFunctions();

   Register ToRegister(int index) const;
-  XMMRegister ToDoubleRegister(int index) const;
+  X87Register ToX87Register(int index) const;
   int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
   int32_t ToInteger32(LConstantOperand* op) const;
   ExternalReference ToExternalReference(LConstantOperand* op) const;

   Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                 LOperand* key,
                                 Representation key_representation,
                                 ElementsKind elements_kind,
                                 uint32_t base_offset);

(... skipping 17 matching lines ...)
   void RecordAndWritePosition(int position) V8_OVERRIDE;

   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
   void EmitGoto(int block);

   // EmitBranch expects to be the last instruction of a block.
   template<class InstrType>
   void EmitBranch(InstrType instr, Condition cc);
   template<class InstrType>
   void EmitFalseBranch(InstrType instr, Condition cc);
-  void EmitNumberUntagD(
+  void EmitNumberUntagDNoSSE2(
       Register input,
       Register temp,
-      XMMRegister result,
+      X87Register res_reg,
       bool allow_undefined_as_nan,
       bool deoptimize_on_minus_zero,
       LEnvironment* env,
       NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
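EmitNumberUntagD becomes EmitNumberUntagDNoSSE2 and delivers its result in an X87Register instead of an XMM register: without SSE2 the untagged double has to be produced with x87 loads (fild/fld) rather than cvtsi2sd/movsd. The tagging convention itself is unchanged; on a 32-bit target a smi keeps its payload in the upper 31 bits with tag bit 0, and anything else is expected to be a heap number holding an IEEE double. A small standalone illustration of the smi half of that convention (not V8 code):

#include <cstdint>

// 32-bit V8 smi convention: tag bit 0 in the lowest bit, payload in bits 1..31.
inline bool IsSmiTagged(uint32_t tagged) { return (tagged & 1u) == 0; }
inline int32_t SmiUntag(uint32_t tagged) {
  // An arithmetic right shift drops the tag bit and sign-extends the payload.
  return static_cast<int32_t>(tagged) >> 1;
}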

   // Emits optimized code for typeof x == "y". Modifies input register.
   // Returns the condition on which a final split to
   // true and false label should be made, to optimize fallthrough.
   Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

(... skipping 32 matching lines ...)
   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
   void DoStoreKeyedFixedArray(LStoreKeyed* instr);

   void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

   // Emits code for pushing either a tagged constant, a (non-double)
   // register, or a stack slot operand.
   void EmitPushTaggedOperand(LOperand* operand);

+  void X87Fld(Operand src, X87OperandType opts);
+
+  void EmitFlushX87ForDeopt();
+  void FlushX87StackIfNecessary(LInstruction* instr) {
+    x87_stack_.FlushIfNecessary(instr, this);
+  }
   friend class LGapResolver;

 #ifdef _MSC_VER
   // On windows, you may not access the stack more than one page below
   // the most recently mapped page. To make the allocated area randomly
   // accessible, we write an arbitrary value to each page in range
   // esp + offset - page_size .. esp in turn.
   void MakeSureStackPagesMapped(int offset);
 #endif
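The #ifdef _MSC_VER block exists because Windows commits stack memory one guard page at a time: touching an address more than a page below the most recently mapped page faults instead of growing the stack, so code that moves esp down by a large amount has to touch every page of the new area in order. A rough plain-C++ equivalent of the probing described in the comment above (illustrative only; the real MakeSureStackPagesMapped emits mov instructions through the MacroAssembler, and the 4 KiB page size is an assumption here):

// Touch one byte in every page of the newly reserved region, highest address
// first, so each guard page is committed before the next one is reached.
void TouchStackPages(volatile char* stack_pointer, int frame_size) {
  const int kPageSize = 4 * 1024;  // assumed page size
  for (int offset = frame_size - kPageSize; offset > 0; offset -= kPageSize) {
    stack_pointer[offset] = 0;
  }
}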

   ZoneList<LEnvironment*> deoptimizations_;
   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
   ZoneList<Handle<Object> > deoptimization_literals_;
   int inlined_function_count_;
   Scope* const scope_;
   TranslationBuffer translations_;
   ZoneList<LDeferredCode*> deferred_;
   bool dynamic_frame_alignment_;
   bool support_aligned_spilled_doubles_;
   int osr_pc_offset_;
   bool frame_is_built_;

+  class X87Stack {
+   public:
+    explicit X87Stack(MacroAssembler* masm)
+        : stack_depth_(0), is_mutable_(true), masm_(masm) { }
+    explicit X87Stack(const X87Stack& other)
+        : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(masm()) {
+      for (int i = 0; i < stack_depth_; i++) {
+        stack_[i] = other.stack_[i];
+      }
+    }
+    bool operator==(const X87Stack& other) const {
+      if (stack_depth_ != other.stack_depth_) return false;
+      for (int i = 0; i < stack_depth_; i++) {
+        if (!stack_[i].is(other.stack_[i])) return false;
+      }
+      return true;
+    }
+    bool Contains(X87Register reg);
+    void Fxch(X87Register reg, int other_slot = 0);
+    void Free(X87Register reg);
+    void PrepareToWrite(X87Register reg);
+    void CommitWrite(X87Register reg);
+    void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
+    void LeavingBlock(int current_block_id, LGoto* goto_instr);
+    int depth() const { return stack_depth_; }
+    void pop() {
+      ASSERT(is_mutable_);
+      stack_depth_--;
+    }
+    void push(X87Register reg) {
+      ASSERT(is_mutable_);
+      ASSERT(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
+      stack_[stack_depth_] = reg;
+      stack_depth_++;
+    }
+
+    MacroAssembler* masm() const { return masm_; }
+    Isolate* isolate() const { return masm_->isolate(); }
+
+   private:
+    int ArrayIndex(X87Register reg);
+    int st2idx(int pos);
+
+    X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
+    int stack_depth_;
+    bool is_mutable_;
+    MacroAssembler* masm_;
+  };
+  X87Stack x87_stack_;
+
   // Builder that keeps track of safepoints in the code. The table
   // itself is emitted at the end of the generated code.
   SafepointTableBuilder safepoints_;

   // Compiler from a set of parallel moves to a sequential list of moves.
   LGapResolver resolver_;

   Safepoint::Kind expected_safepoint_kind_;

   class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
(... skipping 18 matching lines ...)

   friend class LDeferredCode;
   friend class LEnvironment;
   friend class SafepointGenerator;
   DISALLOW_COPY_AND_ASSIGN(LCodeGen);
 };


 class LDeferredCode : public ZoneObject {
  public:
-  explicit LDeferredCode(LCodeGen* codegen)
+  explicit LDeferredCode(LCodeGen* codegen, const LCodeGen::X87Stack& x87_stack)
       : codegen_(codegen),
         external_exit_(NULL),
-        instruction_index_(codegen->current_instruction_) {
+        instruction_index_(codegen->current_instruction_),
+        x87_stack_(x87_stack) {
     codegen->AddDeferredCode(this);
   }

   virtual ~LDeferredCode() {}
   virtual void Generate() = 0;
   virtual LInstruction* instr() = 0;

   void SetExit(Label* exit) { external_exit_ = exit; }
   Label* entry() { return &entry_; }
   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
   Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
   int instruction_index() const { return instruction_index_; }
+  const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }

  protected:
   LCodeGen* codegen() const { return codegen_; }
   MacroAssembler* masm() const { return codegen_->masm(); }

  private:
   LCodeGen* codegen_;
   Label entry_;
   Label exit_;
   Label* external_exit_;
   Label done_;
   int instruction_index_;
+  LCodeGen::X87Stack x87_stack_;
 };

 } }  // namespace v8::internal

-#endif  // V8_IA32_LITHIUM_CODEGEN_IA32_H_
+#endif  // V8_X87_LITHIUM_CODEGEN_X87_H_