OLD | NEW |
| (Empty) |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | |
2 // Redistribution and use in source and binary forms, with or without | |
3 // modification, are permitted provided that the following conditions are | |
4 // met: | |
5 // | |
6 // * Redistributions of source code must retain the above copyright | |
7 // notice, this list of conditions and the following disclaimer. | |
8 // * Redistributions in binary form must reproduce the above | |
9 // copyright notice, this list of conditions and the following | |
10 // disclaimer in the documentation and/or other materials provided | |
11 // with the distribution. | |
12 // * Neither the name of Google Inc. nor the names of its | |
13 // contributors may be used to endorse or promote products derived | |
14 // from this software without specific prior written permission. | |
15 // | |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
27 | |
28 #ifndef V8_CODEGEN_IA32_H_ | |
29 #define V8_CODEGEN_IA32_H_ | |
30 | |
31 namespace v8 { namespace internal { | |
32 | |
33 // Forward declarations | |
34 class DeferredCode; | |
35 class RegisterAllocator; | |
36 class RegisterFile; | |
37 | |
// Modes for storing a value into a variable or reference: CONST_INIT marks
// the initializing store to a const-declared binding (see Reference::SetValue
// and CodeGenerator::StoreToSlot below).
enum InitState { CONST_INIT, NOT_CONST_INIT };

// Tracks whether the expression being compiled is the immediate subexpression
// of a 'typeof' expression, which must not throw reference errors for
// non-existing global properties (see CodeGenerator::LoadTypeofExpression).
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
40 | |
41 | |
42 // ------------------------------------------------------------------------- | |
43 // Reference support | |
44 | |
45 // A reference is a C++ stack-allocated object that keeps an ECMA | |
46 // reference on the execution stack while in scope. For variables | |
47 // the reference is empty, indicating that it isn't necessary to | |
48 // store state on the stack for keeping track of references to those. | |
49 // For properties, we keep either one (named) or two (indexed) values | |
50 // on the execution stack to represent the reference. | |
51 | |
class Reference BASE_EMBEDDED {
 public:
  // The numeric values of the types are significant: see size() below,
  // which returns the enum value directly as a slot count.
  enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  // NOTE(review): the constructor/destructor presumably call the code
  // generator's LoadReference/UnloadReference (declared below as "used by
  // class Reference") -- confirm in the .cc file.
  Reference(CodeGenerator* cgen, Expression* expression);
  ~Reference();

  // The expression this reference was created for.
  Expression* expression() const { return expression_; }
  Type type() const { return type_; }

  // Set the reference's type.  May only be called once, while the
  // reference is still ILLEGAL.
  void set_type(Type value) {
    ASSERT(type_ == ILLEGAL);
    type_ = value;
  }

  // The size the reference takes up on the stack: 0 for ILLEGAL and SLOT,
  // 1 for NAMED, 2 for KEYED (relies on the enum values above).
  int size() const { return (type_ == ILLEGAL) ? 0 : type_; }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }

  // Return the name.  Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack.  The reference is expected to be already on top of
  // the expression stack, and it is left in place with its value above it.
  void GetValue(TypeofState typeof_state);

  // Like GetValue except that the slot is expected to be written to before
  // being read from again.  The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue(TypeofState typeof_state);

  // Generate code to store the value on top of the expression stack in the
  // reference.  The reference is expected to be immediately below the value
  // on the expression stack.  The stored value is left in place (with the
  // reference intact below it) to support chained assignments.
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;     // The code generator this reference belongs to.
  Expression* expression_;  // The referenced expression.
  Type type_;               // ILLEGAL until set_type() is called.
};
97 | |
98 | |
99 // ------------------------------------------------------------------------- | |
100 // Control destinations. | |
101 | |
102 // A control destination encapsulates a pair of jump targets and a | |
103 // flag indicating which one is the preferred fall-through. The | |
104 // preferred fall-through must be unbound, the other may be already | |
105 // bound (ie, a backward target). | |
106 // | |
107 // The true and false targets may be jumped to unconditionally or | |
108 // control may split conditionally. Unconditional jumping and | |
109 // splitting should be emitted in tail position (as the last thing | |
110 // when compiling an expression) because they can cause either label | |
111 // to be bound or the non-fall through to be jumped to leaving an | |
112 // invalid virtual frame. | |
113 // | |
114 // The labels in the control destination can be extracted and | |
115 // manipulated normally without affecting the state of the | |
116 // destination. | |
117 | |
118 class ControlDestination BASE_EMBEDDED { | |
119 public: | |
120 ControlDestination(JumpTarget* true_target, | |
121 JumpTarget* false_target, | |
122 bool true_is_fall_through) | |
123 : true_target_(true_target), | |
124 false_target_(false_target), | |
125 true_is_fall_through_(true_is_fall_through), | |
126 is_used_(false) { | |
127 ASSERT(true_is_fall_through ? !true_target->is_bound() | |
128 : !false_target->is_bound()); | |
129 } | |
130 | |
131 // Accessors for the jump targets. Directly jumping or branching to | |
132 // or binding the targets will not update the destination's state. | |
133 JumpTarget* true_target() const { return true_target_; } | |
134 JumpTarget* false_target() const { return false_target_; } | |
135 | |
136 // True if the the destination has been jumped to unconditionally or | |
137 // control has been split to both targets. This predicate does not | |
138 // test whether the targets have been extracted and manipulated as | |
139 // raw jump targets. | |
140 bool is_used() const { return is_used_; } | |
141 | |
142 // True if the destination is used and the true target (respectively | |
143 // false target) was the fall through. If the target is backward, | |
144 // "fall through" included jumping unconditionally to it. | |
145 bool true_was_fall_through() const { | |
146 return is_used_ && true_is_fall_through_; | |
147 } | |
148 | |
149 bool false_was_fall_through() const { | |
150 return is_used_ && !true_is_fall_through_; | |
151 } | |
152 | |
153 // Emit a branch to one of the true or false targets, and bind the | |
154 // other target. Because this binds the fall-through target, it | |
155 // should be emitted in tail position (as the last thing when | |
156 // compiling an expression). | |
157 void Split(Condition cc) { | |
158 ASSERT(!is_used_); | |
159 if (true_is_fall_through_) { | |
160 false_target_->Branch(NegateCondition(cc)); | |
161 true_target_->Bind(); | |
162 } else { | |
163 true_target_->Branch(cc); | |
164 false_target_->Bind(); | |
165 } | |
166 is_used_ = true; | |
167 } | |
168 | |
169 // Emit an unconditional jump in tail position, to the true target | |
170 // (if the argument is true) or the false target. The "jump" will | |
171 // actually bind the jump target if it is forward, jump to it if it | |
172 // is backward. | |
173 void Goto(bool where) { | |
174 ASSERT(!is_used_); | |
175 JumpTarget* target = where ? true_target_ : false_target_; | |
176 if (target->is_bound()) { | |
177 target->Jump(); | |
178 } else { | |
179 target->Bind(); | |
180 } | |
181 is_used_ = true; | |
182 true_is_fall_through_ = where; | |
183 } | |
184 | |
185 // Mark this jump target as used as if Goto had been called, but | |
186 // without generating a jump or binding a label (the control effect | |
187 // should have already happened). This is used when the left | |
188 // subexpression of the short-circuit boolean operators are | |
189 // compiled. | |
190 void Use(bool where) { | |
191 ASSERT(!is_used_); | |
192 ASSERT((where ? true_target_ : false_target_)->is_bound()); | |
193 is_used_ = true; | |
194 true_is_fall_through_ = where; | |
195 } | |
196 | |
197 // Swap the true and false targets but keep the same actual label as | |
198 // the fall through. This is used when compiling negated | |
199 // expressions, where we want to swap the targets but preserve the | |
200 // state. | |
201 void Invert() { | |
202 JumpTarget* temp_target = true_target_; | |
203 true_target_ = false_target_; | |
204 false_target_ = temp_target; | |
205 | |
206 true_is_fall_through_ = !true_is_fall_through_; | |
207 } | |
208 | |
209 private: | |
210 // True and false jump targets. | |
211 JumpTarget* true_target_; | |
212 JumpTarget* false_target_; | |
213 | |
214 // Before using the destination: true if the true target is the | |
215 // preferred fall through, false if the false target is. After | |
216 // using the destination: true if the true target was actually used | |
217 // as the fall through, false if the false target was. | |
218 bool true_is_fall_through_; | |
219 | |
220 // True if the Split or Goto functions have been called. | |
221 bool is_used_; | |
222 }; | |
223 | |
224 | |
225 // ------------------------------------------------------------------------- | |
226 // Code generation state | |
227 | |
228 // The state is passed down the AST by the code generator (and back up, in | |
229 // the form of the state of the jump target pair). It is threaded through | |
230 // the call stack. Constructing a state implicitly pushes it on the owning | |
231 // code generator's stack of states, and destroying one implicitly pops it. | |
232 // | |
233 // The code generator state is only used for expressions, so statements have | |
234 // the initial state. | |
235 | |
// A CodeGenState is a node in a linked stack of states: constructing one
// implicitly pushes it onto the owning code generator's state stack, and
// destroying it implicitly pops back to the previous state.
class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state.  Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state.  The new state may or may not be inside a typeof, and has its
  // own control destination.
  CodeGenState(CodeGenerator* owner,
               TypeofState typeof_state,
               ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  TypeofState typeof_state() const { return typeof_state_; }
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A flag indicating whether we are compiling the immediate subexpression
  // of a typeof expression.
  TypeofState typeof_state_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
273 | |
274 | |
275 | |
276 | |
277 // ------------------------------------------------------------------------- | |
278 // CodeGenerator | |
279 | |
class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal, generates code for it.  This function should
  // only be called by compiler.cc.
  static Handle<Code> MakeCode(FunctionLiteral* fun,
                               Handle<Script> script,
                               bool is_eval);

#ifdef ENABLE_LOGGING_AND_PROFILING
  // NOTE(review): presumably decides whether a %_Log runtime call should
  // emit code (see GenerateLog below) -- confirm in the .cc file.
  static bool ShouldGenerateLog(Expression* type);
#endif

  // Record function metadata (length, source positions, name, script) on
  // the given JSFunction.
  static void SetFunctionInfo(Handle<JSFunction> fun,
                              int length,
                              int function_token_position,
                              int start_position,
                              int end_position,
                              bool is_expression,
                              bool is_toplevel,
                              Handle<Script> script,
                              Handle<String> inferred_name);

  // Accessors
  MacroAssembler* masm() { return masm_; }

  // The current virtual frame, or NULL when there is no valid frame.
  VirtualFrame* frame() const { return frame_; }

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers.  The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  // The innermost code generation state (see class CodeGenState above).
  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  // Queue deferred code for emission by ProcessDeferred.
  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  // Whether we are generating code that expects a fully spilled virtual
  // frame (see in_spilled_code_ below).
  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

 private:
  // Construction/Destruction
  CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval);
  virtual ~CodeGenerator() { delete masm_; }

  // Accessors
  Scope* scope() const { return scope_; }

  // Clearing and generating deferred code.
  void ClearDeferred();
  void ProcessDeferred();

  bool is_eval() { return is_eval_; }

  // State shorthands, read from the innermost CodeGenState.
  TypeofState typeof_state() const { return state_->typeof_state(); }
  ControlDestination* destination() const { return state_->destination(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (i.e., it does not exit via break,
  // continue, return, or throw).  This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function.
  void GenCode(FunctionLiteral* fun);

  // Generate the return sequence code.  Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which can not be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  // Operand for the slot at 'index' in the given context register.
  Operand ContextOperand(Register context, int index) const {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  Operand GlobalObject() const {
    return ContextOperand(esi, Context::GLOBAL_INDEX);
  }

  // Load an expression for control flow into the given destination;
  // force_control requires the load to have a control-flow effect.
  void LoadCondition(Expression* x,
                     TypeofState typeof_state,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* x, TypeofState typeof_state = NOT_INSIDE_TYPEOF);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory.  This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression,
                    TypeofState typeof_state = NOT_INSIDE_TYPEOF);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions.  We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  void GenericBinaryOperation(
      Token::Value op,
      SmiAnalysis* type,
      OverwriteMode overwrite_mode);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds.  Otherwise it has no effect.
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi.  Consumes the Result *operand.
  void ConstantSmiBinaryOperation(Token::Value op,
                                  Result* operand,
                                  Handle<Object> constant_operand,
                                  SmiAnalysis* type,
                                  bool reversed,
                                  OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results *left and *right.
  void LikelySmiBinaryOperation(Token::Value op,
                                Result* left,
                                Result* right,
                                OverwriteMode overwrite_mode);

  void Comparison(Condition cc,
                  bool strict,
                  ControlDestination* destination);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 16 bits.
  static const int kMaxSmiInlinedBits = 16;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target using
  // at most 16 bits of user-controlled data per assembly operation.
  void LoadUnsafeSmi(Register target, Handle<Object> value);

  void CallWithArguments(ZoneList<Expression*>* arguments, int position);

  void CheckStack();

  bool CheckForInlineRuntimeCall(CallRuntime* node);
  Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  Handle<Code> ComputeCallInitialize(int argc);
  Handle<Code> ComputeCallInitializeInLoop(int argc);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function boilerplate.
  void InstantiateBoilerplate(Handle<JSFunction> boilerplate);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArgumentsAccess(ZoneList<Expression*>* args);

  // Support for accessing the value field of an object (used by Date).
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateFastCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);


  // Methods and constants for fast case switch statement support.
  //
  // Only allow fast-case switch if the range of labels is at most
  // this factor times the number of case labels.
  // Value is derived from comparing the size of code generated by the normal
  // switch code for Smi-labels to the size of a single pointer.  If code
  // quality increases this number should be decreased to match.
  static const int kFastSwitchMaxOverheadFactor = 5;

  // Minimal number of switch cases required before we allow jump-table
  // optimization.
  static const int kFastSwitchMinCaseCount = 5;

  // The limit of the range of a fast-case switch, as a factor of the number
  // of cases of the switch.  Each platform should return a value that
  // is optimal compared to the default code generated for a switch statement
  // on that platform.
  int FastCaseSwitchMaxOverheadFactor();

  // The minimal number of cases in a switch before the fast-case switch
  // optimization is enabled.  Each platform should return a value that
  // is optimal compared to the default code generated for a switch statement
  // on that platform.
  int FastCaseSwitchMinCaseCount();

  // Allocate a jump table and create code to jump through it.
  // Should call GenerateFastCaseSwitchCases to generate the code for
  // all the cases at the appropriate point.
  void GenerateFastCaseSwitchJumpTable(SwitchStatement* node,
                                       int min_index,
                                       int range,
                                       Label* fail_label,
                                       Vector<Label*> case_targets,
                                       Vector<Label> case_labels);

  // Generate the code for cases for the fast case switch.
  // Called by GenerateFastCaseSwitchJumpTable.
  void GenerateFastCaseSwitchCases(SwitchStatement* node,
                                   Vector<Label> case_labels,
                                   VirtualFrame* start_frame);

  // Fast support for constant-Smi switches.
  void GenerateFastCaseSwitchStatement(SwitchStatement* node,
                                       int min_index,
                                       int range,
                                       int default_index);

  // Fast support for constant-Smi switches.  Tests whether switch statement
  // permits optimization and calls GenerateFastCaseSwitch if it does.
  // Returns true if the fast-case switch was generated, and false if not.
  bool TryGenerateFastCaseSwitchStatement(SwitchStatement* node);

  // Methods used to indicate which source code is generated for.  Source
  // positions are collected by the assembler and emitted with the relocation
  // information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Node* node);
  void CodeForSourcePosition(int pos);

#ifdef DEBUG
  // True if the registers are valid for entry to a block.  There should be
  // no frame-external references to eax, ebx, ecx, edx, or edi.
  bool HasValidEntryRegisters();
#endif

  bool is_eval_;  // Tells whether code is generated for eval.
  Handle<Script> script_;
  List<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  // Code generation state
  Scope* scope_;
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled.  Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};
614 | |
615 | |
616 } } // namespace v8::internal | |
617 | |
618 #endif // V8_CODEGEN_IA32_H_ | |
OLD | NEW |