| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| (...skipping 11 matching lines...) | |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_IA32_CODEGEN_IA32_H_ | 28 #ifndef V8_IA32_CODEGEN_IA32_H_ |
| 29 #define V8_IA32_CODEGEN_IA32_H_ | 29 #define V8_IA32_CODEGEN_IA32_H_ |
| 30 | 30 |
| 31 #include "ast.h" | 31 #include "ast.h" |
| 32 #include "ic-inl.h" | 32 #include "ic-inl.h" |
| 33 #include "jump-target-heavy.h" | |
| 34 | 33 |
| 35 namespace v8 { | 34 namespace v8 { |
| 36 namespace internal { | 35 namespace internal { |
| 37 | 36 |
| 38 // Forward declarations | 37 // Forward declarations |
| 39 class CompilationInfo; | 38 class CompilationInfo; |
| 40 class DeferredCode; | |
| 41 class FrameRegisterState; | |
| 42 class RegisterAllocator; | |
| 43 class RegisterFile; | |
| 44 class RuntimeCallHelper; | |
| 45 | |
| 46 | |
| 47 // ------------------------------------------------------------------------- | |
| 48 // Reference support | |
| 49 | |
| 50 // A reference is a C++ stack-allocated object that puts a | |
| 51 // reference on the virtual frame. The reference may be consumed | |
| 52 // by GetValue, TakeValue and SetValue. | |
| 53 // When the lifetime (scope) of a valid reference ends, it must have | |
| 54 // been consumed, and be in state UNLOADED. | |
| 55 class Reference BASE_EMBEDDED { | |
| 56 public: | |
| 57 // The values of the types are important, see size(). | |
| 58 enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 }; | |
| 59 Reference(CodeGenerator* cgen, | |
| 60 Expression* expression, | |
| 61 bool persist_after_get = false); | |
| 62 ~Reference(); | |
| 63 | |
| 64 Expression* expression() const { return expression_; } | |
| 65 Type type() const { return type_; } | |
| 66 void set_type(Type value) { | |
| 67 ASSERT_EQ(ILLEGAL, type_); | |
| 68 type_ = value; | |
| 69 } | |
| 70 | |
| 71 void set_unloaded() { | |
| 72 ASSERT_NE(ILLEGAL, type_); | |
| 73 ASSERT_NE(UNLOADED, type_); | |
| 74 type_ = UNLOADED; | |
| 75 } | |
| 76 // The size the reference takes up on the stack. | |
| 77 int size() const { | |
| 78 return (type_ < SLOT) ? 0 : type_; | |
| 79 } | |
| 80 | |
| 81 bool is_illegal() const { return type_ == ILLEGAL; } | |
| 82 bool is_slot() const { return type_ == SLOT; } | |
| 83 bool is_property() const { return type_ == NAMED || type_ == KEYED; } | |
| 84 bool is_unloaded() const { return type_ == UNLOADED; } | |
| 85 | |
| 86 // Return the name. Only valid for named property references. | |
| 87 Handle<String> GetName(); | |
| 88 | |
| 89 // Generate code to push the value of the reference on top of the | |
| 90 // expression stack. The reference is expected to be already on top of | |
| 91 // the expression stack, and it is consumed by the call unless the | |
| 92 // reference is for a compound assignment. | |
| 93 // If the reference is not consumed, it is left in place under its value. | |
| 94 void GetValue(); | |
| 95 | |
| 96 // Like GetValue except that the slot is expected to be written to before | |
| 97 // being read from again. The value of the reference may be invalidated, | |
| 98 // causing subsequent attempts to read it to fail. | |
| 99 void TakeValue(); | |
| 100 | |
| 101 // Generate code to store the value on top of the expression stack in the | |
| 102 // reference. The reference is expected to be immediately below the value | |
| 103 // on the expression stack. The value is stored in the location specified | |
| 104 // by the reference, and is left on top of the stack, after the reference | |
| 105 // is popped from beneath it (unloaded). | |
| 106 void SetValue(InitState init_state); | |
| 107 | |
| 108 private: | |
| 109 CodeGenerator* cgen_; | |
| 110 Expression* expression_; | |
| 111 Type type_; | |
| 112 // Keep the reference on the stack after get, so it can be used by set later. | |
| 113 bool persist_after_get_; | |
| 114 }; | |
| 115 | |
| 116 | |
| 117 // ------------------------------------------------------------------------- | |
| 118 // Control destinations. | |
| 119 | |
| 120 // A control destination encapsulates a pair of jump targets and a | |
| 121 // flag indicating which one is the preferred fall-through. The | |
| 122 // preferred fall-through must be unbound, the other may be already | |
| 123 // bound (ie, a backward target). | |
| 124 // | |
| 125 // The true and false targets may be jumped to unconditionally or | |
| 126 // control may split conditionally. Unconditional jumping and | |
| 127 // splitting should be emitted in tail position (as the last thing | |
| 128 // when compiling an expression) because they can cause either label | |
| 129 // to be bound or the non-fall-through to be jumped to, leaving an | |
| 130 // invalid virtual frame. | |
| 131 // | |
| 132 // The labels in the control destination can be extracted and | |
| 133 // manipulated normally without affecting the state of the | |
| 134 // destination. | |
| 135 | |
| 136 class ControlDestination BASE_EMBEDDED { | |
| 137 public: | |
| 138 ControlDestination(JumpTarget* true_target, | |
| 139 JumpTarget* false_target, | |
| 140 bool true_is_fall_through) | |
| 141 : true_target_(true_target), | |
| 142 false_target_(false_target), | |
| 143 true_is_fall_through_(true_is_fall_through), | |
| 144 is_used_(false) { | |
| 145 ASSERT(true_is_fall_through ? !true_target->is_bound() | |
| 146 : !false_target->is_bound()); | |
| 147 } | |
| 148 | |
| 149 // Accessors for the jump targets. Directly jumping or branching to | |
| 150 // or binding the targets will not update the destination's state. | |
| 151 JumpTarget* true_target() const { return true_target_; } | |
| 152 JumpTarget* false_target() const { return false_target_; } | |
| 153 | |
| 154 // True if the destination has been jumped to unconditionally or | |
| 155 // control has been split to both targets. This predicate does not | |
| 156 // test whether the targets have been extracted and manipulated as | |
| 157 // raw jump targets. | |
| 158 bool is_used() const { return is_used_; } | |
| 159 | |
| 160 // True if the destination is used and the true target (respectively | |
| 161 // false target) was the fall through. If the target is backward, | |
| 162 // "fall through" included jumping unconditionally to it. | |
| 163 bool true_was_fall_through() const { | |
| 164 return is_used_ && true_is_fall_through_; | |
| 165 } | |
| 166 | |
| 167 bool false_was_fall_through() const { | |
| 168 return is_used_ && !true_is_fall_through_; | |
| 169 } | |
| 170 | |
| 171 // Emit a branch to one of the true or false targets, and bind the | |
| 172 // other target. Because this binds the fall-through target, it | |
| 173 // should be emitted in tail position (as the last thing when | |
| 174 // compiling an expression). | |
| 175 void Split(Condition cc) { | |
| 176 ASSERT(!is_used_); | |
| 177 if (true_is_fall_through_) { | |
| 178 false_target_->Branch(NegateCondition(cc)); | |
| 179 true_target_->Bind(); | |
| 180 } else { | |
| 181 true_target_->Branch(cc); | |
| 182 false_target_->Bind(); | |
| 183 } | |
| 184 is_used_ = true; | |
| 185 } | |
| 186 | |
| 187 // Emit an unconditional jump in tail position, to the true target | |
| 188 // (if the argument is true) or the false target. The "jump" will | |
| 189 // actually bind the jump target if it is forward, jump to it if it | |
| 190 // is backward. | |
| 191 void Goto(bool where) { | |
| 192 ASSERT(!is_used_); | |
| 193 JumpTarget* target = where ? true_target_ : false_target_; | |
| 194 if (target->is_bound()) { | |
| 195 target->Jump(); | |
| 196 } else { | |
| 197 target->Bind(); | |
| 198 } | |
| 199 is_used_ = true; | |
| 200 true_is_fall_through_ = where; | |
| 201 } | |
| 202 | |
| 203 // Mark this destination as used as if Goto had been called, but | |
| 204 // without generating a jump or binding a label (the control effect | |
| 205 // should have already happened). This is used when the left | |
| 206 // subexpression of a short-circuit boolean operator is | |
| 207 // compiled. | |
| 208 void Use(bool where) { | |
| 209 ASSERT(!is_used_); | |
| 210 ASSERT((where ? true_target_ : false_target_)->is_bound()); | |
| 211 is_used_ = true; | |
| 212 true_is_fall_through_ = where; | |
| 213 } | |
| 214 | |
| 215 // Swap the true and false targets but keep the same actual label as | |
| 216 // the fall through. This is used when compiling negated | |
| 217 // expressions, where we want to swap the targets but preserve the | |
| 218 // state. | |
| 219 void Invert() { | |
| 220 JumpTarget* temp_target = true_target_; | |
| 221 true_target_ = false_target_; | |
| 222 false_target_ = temp_target; | |
| 223 | |
| 224 true_is_fall_through_ = !true_is_fall_through_; | |
| 225 } | |
| 226 | |
| 227 private: | |
| 228 // True and false jump targets. | |
| 229 JumpTarget* true_target_; | |
| 230 JumpTarget* false_target_; | |
| 231 | |
| 232 // Before using the destination: true if the true target is the | |
| 233 // preferred fall through, false if the false target is. After | |
| 234 // using the destination: true if the true target was actually used | |
| 235 // as the fall through, false if the false target was. | |
| 236 bool true_is_fall_through_; | |
| 237 | |
| 238 // True if the Split or Goto functions have been called. | |
| 239 bool is_used_; | |
| 240 }; | |
| 241 | |
| 242 | |
| 243 // ------------------------------------------------------------------------- | |
| 244 // Code generation state | |
| 245 | |
| 246 // The state is passed down the AST by the code generator (and back up, in | |
| 247 // the form of the state of the jump target pair). It is threaded through | |
| 248 // the call stack. Constructing a state implicitly pushes it on the owning | |
| 249 // code generator's stack of states, and destroying one implicitly pops it. | |
| 250 // | |
| 251 // The code generator state is only used for expressions, so statements have | |
| 252 // the initial state. | |
| 253 | |
| 254 class CodeGenState BASE_EMBEDDED { | |
| 255 public: | |
| 256 // Create an initial code generator state. Destroying the initial state | |
| 257 // leaves the code generator with a NULL state. | |
| 258 explicit CodeGenState(CodeGenerator* owner); | |
| 259 | |
| 260 // Create a code generator state based on a code generator's current | |
| 261 // state. The new state has its own control destination. | |
| 262 CodeGenState(CodeGenerator* owner, ControlDestination* destination); | |
| 263 | |
| 264 // Destroy a code generator state and restore the owning code generator's | |
| 265 // previous state. | |
| 266 ~CodeGenState(); | |
| 267 | |
| 268 // Accessors for the state. | |
| 269 ControlDestination* destination() const { return destination_; } | |
| 270 | |
| 271 private: | |
| 272 // The owning code generator. | |
| 273 CodeGenerator* owner_; | |
| 274 | |
| 275 // A control destination in case the expression has a control-flow | |
| 276 // effect. | |
| 277 ControlDestination* destination_; | |
| 278 | |
| 279 // The previous state of the owning code generator, restored when | |
| 280 // this state is destroyed. | |
| 281 CodeGenState* previous_; | |
| 282 }; | |
| 283 | |
| 284 | |
| 285 // ------------------------------------------------------------------------- | |
| 286 // Arguments allocation mode. | |
| 287 | |
| 288 enum ArgumentsAllocationMode { | |
| 289 NO_ARGUMENTS_ALLOCATION, | |
| 290 EAGER_ARGUMENTS_ALLOCATION, | |
| 291 LAZY_ARGUMENTS_ALLOCATION | |
| 292 }; | |
| 293 | |
| 294 | 39 |
| 295 // ------------------------------------------------------------------------- | 40 // ------------------------------------------------------------------------- |
| 296 // CodeGenerator | 41 // CodeGenerator |
| 297 | 42 |
| 298 class CodeGenerator: public AstVisitor { | 43 class CodeGenerator { |
| 299 public: | 44 public: |
| 300 static bool MakeCode(CompilationInfo* info); | |
| 301 | |
| 302 // Printing of AST, etc. as requested by flags. | 45 // Printing of AST, etc. as requested by flags. |
| 303 static void MakeCodePrologue(CompilationInfo* info); | 46 static void MakeCodePrologue(CompilationInfo* info); |
| 304 | 47 |
| 305 // Allocate and install the code. | 48 // Allocate and install the code. |
| 306 static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm, | 49 static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm, |
| 307 Code::Flags flags, | 50 Code::Flags flags, |
| 308 CompilationInfo* info); | 51 CompilationInfo* info); |
| 309 | 52 |
| 310 // Print the code after compiling it. | 53 // Print the code after compiling it. |
| 311 static void PrintCode(Handle<Code> code, CompilationInfo* info); | 54 static void PrintCode(Handle<Code> code, CompilationInfo* info); |
| 312 | 55 |
| 313 #ifdef ENABLE_LOGGING_AND_PROFILING | 56 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 314 static bool ShouldGenerateLog(Expression* type); | 57 static bool ShouldGenerateLog(Expression* type); |
| 315 #endif | 58 #endif |
| 316 | 59 |
| 317 static bool RecordPositions(MacroAssembler* masm, | 60 static bool RecordPositions(MacroAssembler* masm, |
| 318 int pos, | 61 int pos, |
| 319 bool right_here = false); | 62 bool right_here = false); |
| 320 | 63 |
| 321 // Accessors | |
| 322 MacroAssembler* masm() { return masm_; } | |
| 323 VirtualFrame* frame() const { return frame_; } | |
| 324 inline Handle<Script> script(); | |
| 325 | 64 |
| 326 bool has_valid_frame() const { return frame_ != NULL; } | |
| 327 | |
| 328 // Set the virtual frame to be new_frame, with non-frame register | |
| 329 // reference counts given by non_frame_registers. The non-frame | |
| 330 // register reference counts of the old frame are returned in | |
| 331 // non_frame_registers. | |
| 332 void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers); | |
| 333 | |
| 334 void DeleteFrame(); | |
| 335 | |
| 336 RegisterAllocator* allocator() const { return allocator_; } | |
| 337 | |
| 338 CodeGenState* state() { return state_; } | |
| 339 void set_state(CodeGenState* state) { state_ = state; } | |
| 340 | |
| 341 void AddDeferred(DeferredCode* code) { deferred_.Add(code); } | |
| 342 | |
| 343 bool in_spilled_code() const { return in_spilled_code_; } | |
| 344 void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; } | |
| 345 | |
| 346 // Return an operand for the element at |index_as_smi| + |additional_offset| | |
| 347 // in the FixedArray whose pointer is held in |array|. |index_as_smi| is a Smi. | |
| 348 static Operand FixedArrayElementOperand(Register array, | 65 static Operand FixedArrayElementOperand(Register array, |
| 349 Register index_as_smi, | 66 Register index_as_smi, |
| 350 int additional_offset = 0) { | 67 int additional_offset = 0) { |
| 351 int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize; | 68 int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize; |
| 352 return FieldOperand(array, index_as_smi, times_half_pointer_size, offset); | 69 return FieldOperand(array, index_as_smi, times_half_pointer_size, offset); |
| 353 } | 70 } |
| 354 | 71 |
| 355 private: | 72 private: |
| 356 // Type of a member function that generates inline code for a native function. | |
| 357 typedef void (CodeGenerator::*InlineFunctionGenerator) | |
| 358 (ZoneList<Expression*>*); | |
| 359 | |
| 360 static const InlineFunctionGenerator kInlineFunctionGenerators[]; | |
| 361 | |
| 362 // Construction/Destruction | |
| 363 explicit CodeGenerator(MacroAssembler* masm); | |
| 364 | |
| 365 // Accessors | |
| 366 inline bool is_eval(); | |
| 367 inline Scope* scope(); | |
| 368 inline bool is_strict_mode(); | |
| 369 inline StrictModeFlag strict_mode_flag(); | |
| 370 | |
| 371 // Generating deferred code. | |
| 372 void ProcessDeferred(); | |
| 373 | |
| 374 // State | |
| 375 ControlDestination* destination() const { return state_->destination(); } | |
| 376 | |
| 377 // Control of side-effect-free int32 expression compilation. | |
| 378 bool in_safe_int32_mode() { return in_safe_int32_mode_; } | |
| 379 void set_in_safe_int32_mode(bool value) { in_safe_int32_mode_ = value; } | |
| 380 bool safe_int32_mode_enabled() { | |
| 381 return FLAG_safe_int32_compiler && safe_int32_mode_enabled_; | |
| 382 } | |
| 383 void set_safe_int32_mode_enabled(bool value) { | |
| 384 safe_int32_mode_enabled_ = value; | |
| 385 } | |
| 386 void set_unsafe_bailout(BreakTarget* unsafe_bailout) { | |
| 387 unsafe_bailout_ = unsafe_bailout; | |
| 388 } | |
| 389 | |
| 390 // Take the Result that is an untagged int32, and convert it to a tagged | |
| 391 // Smi or HeapNumber. Remove the untagged_int32 flag from the result. | |
| 392 void ConvertInt32ResultToNumber(Result* value); | |
| 393 void ConvertInt32ResultToSmi(Result* value); | |
| 394 | |
| 395 // Track loop nesting level. | |
| 396 int loop_nesting() const { return loop_nesting_; } | |
| 397 void IncrementLoopNesting() { loop_nesting_++; } | |
| 398 void DecrementLoopNesting() { loop_nesting_--; } | |
| 399 | |
| 400 // Node visitors. | |
| 401 void VisitStatements(ZoneList<Statement*>* statements); | |
| 402 | |
| 403 virtual void VisitSlot(Slot* node); | |
| 404 #define DEF_VISIT(type) \ | |
| 405 virtual void Visit##type(type* node); | |
| 406 AST_NODE_LIST(DEF_VISIT) | |
| 407 #undef DEF_VISIT | |
| 408 | |
| 409 // Visit a statement and then spill the virtual frame if control flow can | |
| 410 // reach the end of the statement (ie, it does not exit via break, | |
| 411 // continue, return, or throw). This function is used temporarily while | |
| 412 // the code generator is being transformed. | |
| 413 void VisitAndSpill(Statement* statement); | |
| 414 | |
| 415 // Visit a list of statements and then spill the virtual frame if control | |
| 416 // flow can reach the end of the list. | |
| 417 void VisitStatementsAndSpill(ZoneList<Statement*>* statements); | |
| 418 | |
| 419 // Main code generation function | |
| 420 void Generate(CompilationInfo* info); | |
| 421 | |
| 422 // Generate the return sequence code. Should be called no more than | |
| 423 // once per compiled function, immediately after binding the return | |
| 424 // target (which cannot be done more than once). | |
| 425 void GenerateReturnSequence(Result* return_value); | |
| 426 | |
| 427 // Returns the arguments allocation mode. | |
| 428 ArgumentsAllocationMode ArgumentsMode(); | |
| 429 | |
| 430 // Store the arguments object and allocate it if necessary. | |
| 431 Result StoreArgumentsObject(bool initial); | |
| 432 | |
| 433 // The following are used by class Reference. | |
| 434 void LoadReference(Reference* ref); | |
| 435 | |
| 436 Operand SlotOperand(Slot* slot, Register tmp); | |
| 437 | |
| 438 Operand ContextSlotOperandCheckExtensions(Slot* slot, | |
| 439 Result tmp, | |
| 440 JumpTarget* slow); | |
| 441 | |
| 442 // Expressions | |
| 443 void LoadCondition(Expression* expr, | |
| 444 ControlDestination* destination, | |
| 445 bool force_control); | |
| 446 void Load(Expression* expr); | |
| 447 void LoadGlobal(); | |
| 448 void LoadGlobalReceiver(); | |
| 449 | |
| 450 // Generate code to push the value of an expression on top of the frame | |
| 451 // and then spill the frame fully to memory. This function is used | |
| 452 // temporarily while the code generator is being transformed. | |
| 453 void LoadAndSpill(Expression* expression); | |
| 454 | |
| 455 // Evaluate an expression and place its value on top of the frame, | |
| 456 // using, or not using, the side-effect-free expression compiler. | |
| 457 void LoadInSafeInt32Mode(Expression* expr, BreakTarget* unsafe_bailout); | |
| 458 void LoadWithSafeInt32ModeDisabled(Expression* expr); | |
| 459 | |
| 460 // Read a value from a slot and leave it on top of the expression stack. | |
| 461 void LoadFromSlot(Slot* slot, TypeofState typeof_state); | |
| 462 void LoadFromSlotCheckForArguments(Slot* slot, TypeofState typeof_state); | |
| 463 Result LoadFromGlobalSlotCheckExtensions(Slot* slot, | |
| 464 TypeofState typeof_state, | |
| 465 JumpTarget* slow); | |
| 466 | |
| 467 // Support for loading from local/global variables and arguments | |
| 468 // whose location is known unless they are shadowed by | |
| 469 // eval-introduced bindings. Generates no code for unsupported slot | |
| 470 // types and therefore expects to fall through to the slow jump target. | |
| 471 void EmitDynamicLoadFromSlotFastCase(Slot* slot, | |
| 472 TypeofState typeof_state, | |
| 473 Result* result, | |
| 474 JumpTarget* slow, | |
| 475 JumpTarget* done); | |
| 476 | |
| 477 // Store the value on top of the expression stack into a slot, leaving the | |
| 478 // value in place. | |
| 479 void StoreToSlot(Slot* slot, InitState init_state); | |
| 480 | |
| 481 // Support for compiling assignment expressions. | |
| 482 void EmitSlotAssignment(Assignment* node); | |
| 483 void EmitNamedPropertyAssignment(Assignment* node); | |
| 484 void EmitKeyedPropertyAssignment(Assignment* node); | |
| 485 | |
| 486 // Receiver is passed on the frame and consumed. | |
| 487 Result EmitNamedLoad(Handle<String> name, bool is_contextual); | |
| 488 | |
| 489 // If the store is contextual, value is passed on the frame and consumed. | |
| 490 // Otherwise, receiver and value are passed on the frame and consumed. | |
| 491 Result EmitNamedStore(Handle<String> name, bool is_contextual); | |
| 492 | |
| 493 // Receiver and key are passed on the frame and consumed. | |
| 494 Result EmitKeyedLoad(); | |
| 495 | |
| 496 // Receiver, key, and value are passed on the frame and consumed. | |
| 497 Result EmitKeyedStore(StaticType* key_type); | |
| 498 | |
| 499 // Special code for typeof expressions: Unfortunately, we must | |
| 500 // be careful when loading the expression in 'typeof' | |
| 501 // expressions. We are not allowed to throw reference errors for | |
| 502 // non-existing properties of the global object, so we must make it | |
| 503 // look like an explicit property access, instead of an access | |
| 504 // through the context chain. | |
| 505 void LoadTypeofExpression(Expression* x); | |
| 506 | |
| 507 // Translate the value on top of the frame into control flow to the | |
| 508 // control destination. | |
| 509 void ToBoolean(ControlDestination* destination); | |
| 510 | |
| 511 // Generate code that computes a shortcutting logical operation. | |
| 512 void GenerateLogicalBooleanOperation(BinaryOperation* node); | |
| 513 | |
| 514 void GenericBinaryOperation(BinaryOperation* expr, | |
| 515 OverwriteMode overwrite_mode); | |
| 516 | |
| 517 // Emits a code sequence that jumps to a JumpTarget if the inputs | |
| 518 // are both smis. Cannot be in MacroAssembler because it takes | |
| 519 // advantage of TypeInfo to skip unneeded checks. | |
| 520 // Allocates a temporary register, possibly spilling from the frame, | |
| 521 // if it needs to check both left and right. | |
| 522 void JumpIfBothSmiUsingTypeInfo(Result* left, | |
| 523 Result* right, | |
| 524 JumpTarget* both_smi); | |
| 525 | |
| 526 // Emits a code sequence that jumps to deferred code if the inputs | |
| 527 // are not both smis. Cannot be in MacroAssembler because it takes | |
| 528 // a deferred code object. | |
| 529 void JumpIfNotBothSmiUsingTypeInfo(Register left, | |
| 530 Register right, | |
| 531 Register scratch, | |
| 532 TypeInfo left_info, | |
| 533 TypeInfo right_info, | |
| 534 DeferredCode* deferred); | |
| 535 | |
| 536 // Emits a code sequence that jumps to the label if the inputs | |
| 537 // are not both smis. | |
| 538 void JumpIfNotBothSmiUsingTypeInfo(Register left, | |
| 539 Register right, | |
| 540 Register scratch, | |
| 541 TypeInfo left_info, | |
| 542 TypeInfo right_info, | |
| 543 Label* on_non_smi); | |
| 544 | |
| 545 // If possible, combine two constant smi values using op to produce | |
| 546 // a smi result, and push it on the virtual frame, all at compile time. | |
| 547 // Returns true if it succeeds. Otherwise it has no effect. | |
| 548 bool FoldConstantSmis(Token::Value op, int left, int right); | |
| 549 | |
| 550 // Emit code to perform a binary operation on a constant | |
| 551 // smi and a likely smi. Consumes the Result operand. | |
| 552 Result ConstantSmiBinaryOperation(BinaryOperation* expr, | |
| 553 Result* operand, | |
| 554 Handle<Object> constant_operand, | |
| 555 bool reversed, | |
| 556 OverwriteMode overwrite_mode); | |
| 557 | |
| 558 // Emit code to perform a binary operation on two likely smis. | |
| 559 // The code to handle smi arguments is produced inline. | |
| 560 // Consumes the Results left and right. | |
| 561 Result LikelySmiBinaryOperation(BinaryOperation* expr, | |
| 562 Result* left, | |
| 563 Result* right, | |
| 564 OverwriteMode overwrite_mode); | |
| 565 | |
| 566 | |
| 567 // Emit code to perform a binary operation on two untagged int32 values. | |
| 568 // The values are on top of the frame, and the result is pushed on the frame. | |
| 569 void Int32BinaryOperation(BinaryOperation* node); | |
| 570 | |
| 571 | |
| 572 // Generate a stub call from the virtual frame. | |
| 573 Result GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub, | |
| 574 Result* left, | |
| 575 Result* right); | |
| 576 | |
| 577 void Comparison(AstNode* node, | |
| 578 Condition cc, | |
| 579 bool strict, | |
| 580 ControlDestination* destination); | |
| 581 | |
| 582 // If at least one of the sides is a constant smi, generate optimized code. | |
| 583 void ConstantSmiComparison(Condition cc, | |
| 584 bool strict, | |
| 585 ControlDestination* destination, | |
| 586 Result* left_side, | |
| 587 Result* right_side, | |
| 588 bool left_side_constant_smi, | |
| 589 bool right_side_constant_smi, | |
| 590 bool is_loop_condition); | |
| 591 | |
| 592 void GenerateInlineNumberComparison(Result* left_side, | |
| 593 Result* right_side, | |
| 594 Condition cc, | |
| 595 ControlDestination* dest); | |
| 596 | |
| 597 // To prevent long attacker-controlled byte sequences, integer constants | |
| 598 // from the JavaScript source are loaded in two parts if they are larger | |
| 599 // than 17 bits. | |
| 600 static const int kMaxSmiInlinedBits = 17; | |
| 601 bool IsUnsafeSmi(Handle<Object> value); | |
| 602 // Load an integer constant x into a register target or into the stack using | |
| 603 // at most 16 bits of user-controlled data per assembly operation. | |
| 604 void MoveUnsafeSmi(Register target, Handle<Object> value); | |
| 605 void StoreUnsafeSmiToLocal(int offset, Handle<Object> value); | |
| 606 void PushUnsafeSmi(Handle<Object> value); | |
| 607 | |
| 608 void CallWithArguments(ZoneList<Expression*>* arguments, | |
| 609 CallFunctionFlags flags, | |
| 610 int position); | |
| 611 | |
| 612 // An optimized implementation of expressions of the form | |
| 613 // x.apply(y, arguments). We call x the applicand and y the receiver. | |
| 614 // The optimization avoids allocating an arguments object if possible. | |
| 615 void CallApplyLazy(Expression* applicand, | |
| 616 Expression* receiver, | |
| 617 VariableProxy* arguments, | |
| 618 int position); | |
| 619 | |
| 620 void CheckStack(); | |
| 621 | |
| 622 bool CheckForInlineRuntimeCall(CallRuntime* node); | |
| 623 | |
| 624 void ProcessDeclarations(ZoneList<Declaration*>* declarations); | |
| 625 | |
| 626 // Declare global variables and functions in the given array of | |
| 627 // name/value pairs. | |
| 628 void DeclareGlobals(Handle<FixedArray> pairs); | |
| 629 | |
| 630 // Instantiate the function based on the shared function info. | |
| 631 Result InstantiateFunction(Handle<SharedFunctionInfo> function_info, | |
| 632 bool pretenure); | |
| 633 | |
| 634 // Support for types. | |
| 635 void GenerateIsSmi(ZoneList<Expression*>* args); | |
| 636 void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args); | |
| 637 void GenerateIsArray(ZoneList<Expression*>* args); | |
| 638 void GenerateIsRegExp(ZoneList<Expression*>* args); | |
| 639 void GenerateIsObject(ZoneList<Expression*>* args); | |
| 640 void GenerateIsSpecObject(ZoneList<Expression*>* args); | |
| 641 void GenerateIsFunction(ZoneList<Expression*>* args); | |
| 642 void GenerateIsUndetectableObject(ZoneList<Expression*>* args); | |
| 643 void GenerateIsStringWrapperSafeForDefaultValueOf( | |
| 644 ZoneList<Expression*>* args); | |
| 645 | |
| 646 // Support for construct call checks. | |
| 647 void GenerateIsConstructCall(ZoneList<Expression*>* args); | |
| 648 | |
| 649 // Support for arguments.length and arguments[?]. | |
| 650 void GenerateArgumentsLength(ZoneList<Expression*>* args); | |
| 651 void GenerateArguments(ZoneList<Expression*>* args); | |
| 652 | |
| 653 // Support for accessing the class and value fields of an object. | |
| 654 void GenerateClassOf(ZoneList<Expression*>* args); | |
| 655 void GenerateValueOf(ZoneList<Expression*>* args); | |
| 656 void GenerateSetValueOf(ZoneList<Expression*>* args); | |
| 657 | |
| 658 // Fast support for charCodeAt(n). | |
| 659 void GenerateStringCharCodeAt(ZoneList<Expression*>* args); | |
| 660 | |
| 661 // Fast support for String.fromCharCode(n). | |
| 662 void GenerateStringCharFromCode(ZoneList<Expression*>* args); | |
| 663 | |
| 664 // Fast support for string.charAt(n) and string[n]. | |
| 665 void GenerateStringCharAt(ZoneList<Expression*>* args); | |
| 666 | |
| 667 // Fast support for object equality testing. | |
| 668 void GenerateObjectEquals(ZoneList<Expression*>* args); | |
| 669 | |
| 670 void GenerateLog(ZoneList<Expression*>* args); | |
| 671 | |
| 672 void GenerateGetFramePointer(ZoneList<Expression*>* args); | |
| 673 | |
| 674 // Fast support for Math.random(). | |
| 675 void GenerateRandomHeapNumber(ZoneList<Expression*>* args); | |
| 676 | |
| 677 // Fast support for StringAdd. | |
| 678 void GenerateStringAdd(ZoneList<Expression*>* args); | |
| 679 | |
| 680 // Fast support for SubString. | |
| 681 void GenerateSubString(ZoneList<Expression*>* args); | |
| 682 | |
| 683 // Fast support for StringCompare. | |
| 684 void GenerateStringCompare(ZoneList<Expression*>* args); | |
| 685 | |
| 686 // Support for direct calls from JavaScript to native RegExp code. | |
| 687 void GenerateRegExpExec(ZoneList<Expression*>* args); | |
| 688 | |
| 689 // Construct a RegExp exec result with two in-object properties. | |
| 690 void GenerateRegExpConstructResult(ZoneList<Expression*>* args); | |
| 691 | |
| 692 // Support for fast native caches. | |
| 693 void GenerateGetFromCache(ZoneList<Expression*>* args); | |
| 694 | |
| 695 // Fast support for number to string. | |
| 696 void GenerateNumberToString(ZoneList<Expression*>* args); | |
| 697 | |
| 698 // Fast swapping of elements. Takes three expressions, the object and two | |
| 699 // indices. This should only be used if the indices are known to be | |
| 700 // non-negative and within bounds of the elements array at the call site. | |
| 701 void GenerateSwapElements(ZoneList<Expression*>* args); | |
| 702 | |
| 703 // Fast call for custom callbacks. | |
| 704 void GenerateCallFunction(ZoneList<Expression*>* args); | |
| 705 | |
| 706 // Fast call to math functions. | |
| 707 void GenerateMathPow(ZoneList<Expression*>* args); | |
| 708 void GenerateMathSin(ZoneList<Expression*>* args); | |
| 709 void GenerateMathCos(ZoneList<Expression*>* args); | |
| 710 void GenerateMathSqrt(ZoneList<Expression*>* args); | |
| 711 void GenerateMathLog(ZoneList<Expression*>* args); | |
| 712 | |
| 713 // Check whether two RegExps are equivalent. | |
| 714 void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args); | |
| 715 | |
| 716 void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args); | |
| 717 void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args); | |
| 718 void GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args); | |
| 719 | |
| 720 // Simple condition analysis. | |
| 721 enum ConditionAnalysis { | |
| 722 ALWAYS_TRUE, | |
| 723 ALWAYS_FALSE, | |
| 724 DONT_KNOW | |
| 725 }; | |
| 726 ConditionAnalysis AnalyzeCondition(Expression* cond); | |
| 727 | |
| 728 // Methods used to indicate which source code the generated code is for. Source | |
| 729 // positions are collected by the assembler and emitted with the relocation | |
| 730 // information. | |
| 731 void CodeForFunctionPosition(FunctionLiteral* fun); | |
| 732 void CodeForReturnPosition(FunctionLiteral* fun); | |
| 733 void CodeForStatementPosition(Statement* stmt); | |
| 734 void CodeForDoWhileConditionPosition(DoWhileStatement* stmt); | |
| 735 void CodeForSourcePosition(int pos); | |
| 736 | |
| 737 void SetTypeForStackSlot(Slot* slot, TypeInfo info); | |
| 738 | |
| 739 #ifdef DEBUG | |
| 740 // True if the registers are valid for entry to a block. There should | |
| 741 // be no frame-external references to (non-reserved) registers. | |
| 742 bool HasValidEntryRegisters(); | |
| 743 #endif | |
| 744 | |
| 745 ZoneList<DeferredCode*> deferred_; | |
| 746 | |
| 747 // Assembler | |
| 748 MacroAssembler* masm_; // to generate code | |
| 749 | |
| 750 CompilationInfo* info_; | |
| 751 | |
| 752 // Code generation state | |
| 753 VirtualFrame* frame_; | |
| 754 RegisterAllocator* allocator_; | |
| 755 CodeGenState* state_; | |
| 756 int loop_nesting_; | |
| 757 bool in_safe_int32_mode_; | |
| 758 bool safe_int32_mode_enabled_; | |
| 759 | |
| 760 // Jump targets. | |
| 761 // The target of the return from the function. | |
| 762 BreakTarget function_return_; | |
| 763 // The target of the bailout from a side-effect-free int32 subexpression. | |
| 764 BreakTarget* unsafe_bailout_; | |
| 765 | |
| 766 // True if the function return is shadowed (ie, jumping to the target | |
| 767 // function_return_ does not jump to the true function return, but rather | |
| 768 // to some unlinking code). | |
| 769 bool function_return_is_shadowed_; | |
| 770 | |
| 771 // True when we are in code that expects the virtual frame to be fully | |
| 772 // spilled. Some virtual frame functions are disabled in DEBUG builds when | |
| 773 // called from spilled code, because they do not leave the virtual frame | |
| 774 // in a spilled state. | |
| 775 bool in_spilled_code_; | |
| 776 | |
| 777 // A cookie that is used for JIT IMM32 encoding. Initialized to a | |
| 778 // random number when the command-line flag | |
| 779 // FLAG_mask_constants_with_cookie is true, zero otherwise. | |
| 780 int jit_cookie_; | |
| 781 | |
| 782 friend class VirtualFrame; | |
| 783 friend class Isolate; | |
| 784 friend class JumpTarget; | |
| 785 friend class Reference; | |
| 786 friend class Result; | |
| 787 friend class FastCodeGenerator; | |
| 788 friend class FullCodeGenerator; | |
| 789 friend class FullCodeGenSyntaxChecker; | |
| 790 friend class LCodeGen; | |
| 791 | |
| 792 friend class CodeGeneratorPatcher; // Used in test-log-stack-tracer.cc | |
| 793 friend class InlineRuntimeFunctionsTable; | |
| 794 | |
| 795 DISALLOW_COPY_AND_ASSIGN(CodeGenerator); | 73 DISALLOW_COPY_AND_ASSIGN(CodeGenerator); |
| 796 }; | 74 }; |
| 797 | 75 |
| 798 | 76 |
| 799 } } // namespace v8::internal | 77 } } // namespace v8::internal |
| 800 | 78 |
| 801 #endif // V8_IA32_CODEGEN_IA32_H_ | 79 #endif // V8_IA32_CODEGEN_IA32_H_ |
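A few sketches of the machinery this CL touches, for readers skimming the diff. All snippets are stand-alone C++ with local stand-in names and assumed constants; none of it is quoted from the deleted .cc files.

The Reference class (old lines 50-114) encodes its stack footprint in the enum itself: size() returns the enumerator value, so a NAMED reference accounts for one extra word kept on the frame and a KEYED reference for two (presumably the receiver, and the receiver plus key), while UNLOADED, ILLEGAL, and SLOT references occupy nothing. A minimal sketch of that trick:

```cpp
// Why "the values of the types are important, see size()": the
// enumerator doubles as the number of stack words the reference
// occupies. Stand-alone model; enum copied from the header above.
#include <cassert>

enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

int size(Type type) { return (type < SLOT) ? 0 : type; }

int main() {
  assert(size(UNLOADED) == 0 && size(ILLEGAL) == 0 && size(SLOT) == 0);
  assert(size(NAMED) == 1);  // one extra word kept on the frame
  assert(size(KEYED) == 2);  // two extra words kept on the frame
  return 0;
}
```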
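ControlDestination (old lines 120-240) is pure bookkeeping over two jump targets, so its contract can be modeled without an assembler. The sketch below keeps only the state machine; Branch/Bind are no-op stand-ins for the code-emitting JumpTarget calls, the condition argument is dropped, and the assertions mirror the header's comments: after Split the destination is used, the branch went to the non-fall-through target, and the fall-through label is bound.

```cpp
// Stand-alone model of ControlDestination::Split. No code is emitted;
// JumpTarget only records whether its label has been bound.
#include <cassert>

struct JumpTarget {
  bool bound = false;
  void Branch() {}               // stands in for emitting jcc <label>
  void Bind() { bound = true; }  // stands in for binding the label here
  bool is_bound() const { return bound; }
};

class ControlDestination {
 public:
  ControlDestination(JumpTarget* t, JumpTarget* f, bool true_is_fall_through)
      : true_(t), false_(f), true_is_fall_through_(true_is_fall_through) {
    // The preferred fall-through must be unbound.
    assert(true_is_fall_through ? !t->is_bound() : !f->is_bound());
  }

  // Branch to one target, bind the other; must be in tail position.
  void Split() {
    assert(!is_used_);
    if (true_is_fall_through_) { false_->Branch(); true_->Bind(); }
    else                       { true_->Branch();  false_->Bind(); }
    is_used_ = true;
  }

  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

 private:
  JumpTarget* true_;
  JumpTarget* false_;
  bool true_is_fall_through_;
  bool is_used_ = false;
};

int main() {
  JumpTarget t, f;
  ControlDestination dest(&t, &f, /*true_is_fall_through=*/true);
  dest.Split();  // e.g. right after a cmp, as the last step of an expression
  assert(dest.true_was_fall_through() && t.is_bound() && !f.is_bound());
  return 0;
}
```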
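CodeGenState (old lines 254-282) leans on C++ scoping: "constructing a state implicitly pushes it on the owning code generator's stack of states, and destroying one implicitly pops it." A minimal sketch of that RAII discipline, with a bare stand-in for CodeGenerator:

```cpp
// Stand-alone model of the CodeGenState push/pop described above.
#include <cassert>

struct CodeGen;  // stand-in for CodeGenerator

struct CodeGenState {
  explicit CodeGenState(CodeGen* owner);
  ~CodeGenState();
  CodeGen* owner_;
  CodeGenState* previous_;  // restored by the destructor
};

struct CodeGen {
  CodeGenState* state_ = nullptr;  // NULL until an initial state exists
};

CodeGenState::CodeGenState(CodeGen* owner)
    : owner_(owner), previous_(owner->state_) {
  owner->state_ = this;  // implicit push
}

CodeGenState::~CodeGenState() {
  owner_->state_ = previous_;  // implicit pop
}

int main() {
  CodeGen cgen;
  {
    CodeGenState outer(&cgen);
    assert(cgen.state_ == &outer);
    {
      CodeGenState inner(&cgen);
      assert(cgen.state_ == &inner);
    }
    assert(cgen.state_ == &outer);  // inner state popped on scope exit
  }
  assert(cgen.state_ == nullptr);   // "leaves the code generator with a NULL state"
  return 0;
}
```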
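FixedArrayElementOperand, the one helper the new file keeps (new lines 65-70), leans on the ia32 Smi representation: a Smi stores its value shifted left by one, so scaling the still-tagged index by times_half_pointer_size (a factor of 2) yields index * kPointerSize without untagging, and FieldOperand folds the -kHeapObjectTag adjustment into the displacement. A worked version of the arithmetic; the constants mirror V8's 32-bit values (kHeapObjectTag = 1, two-word FixedArray header) and should be treated as assumptions of this sketch:

```cpp
// Address arithmetic behind FixedArrayElementOperand, as plain integers.
#include <cassert>
#include <cstdint>

const int kPointerSize = 4;                          // ia32 word size
const int kHeapObjectTag = 1;                        // heap-pointer tag
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length words

// On ia32 a Smi is its value shifted left by one: tagged word == 2 * value.
int32_t SmiTag(int32_t value) { return value << 1; }

// FieldOperand(array, smi, times_half_pointer_size, offset) addresses
// [array + smi * 2 + offset - kHeapObjectTag]; since smi == 2 * index,
// the *2 scale contributes exactly index * kPointerSize bytes.
int32_t ElementDisplacement(int32_t index, int additional_offset = 0) {
  int offset = kFixedArrayHeaderSize + additional_offset * kPointerSize;
  return SmiTag(index) * 2 + offset - kHeapObjectTag;
}

int main() {
  // Element 3: 8-byte header + 3 words, minus the tag on the base pointer.
  assert(ElementDisplacement(3) == 8 + 3 * 4 - 1);
  // additional_offset shifts the displacement by whole elements.
  assert(ElementDisplacement(3, 1) == 8 + 4 * 4 - 1);
  return 0;
}
```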
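Finally, the unsafe-smi support (old lines 597-606) and jit_cookie_ (old lines 777-780) serve one mitigation: never let a long attacker-chosen bit pattern appear verbatim in the instruction stream (the header caps inline constants at 17 bits, with at most 16 user-controlled bits per instruction). The header does not show how MoveUnsafeSmi encodes the split, so the sketch below illustrates one common shape of such constant blinding — XOR the immediate with a random per-codegen cookie and undo it at run time. Treat the exact scheme as an assumption, not the deleted implementation:

```cpp
// Constant blinding in the style suggested by jit_cookie_ and
// FLAG_mask_constants_with_cookie: the immediate that lands in the
// code stream is value ^ cookie, never the raw user constant.
#include <cassert>
#include <cstdint>

int32_t LoadMaskedConstant(int32_t user_constant, int32_t jit_cookie) {
  // Codegen would emit:  mov reg, (user_constant ^ cookie)
  int32_t reg = user_constant ^ jit_cookie;  // only this lands in the code
  // followed by:         xor reg, cookie
  reg ^= jit_cookie;                         // run-time unmask
  return reg;
}

int main() {
  const int32_t cookie = 0x5A3C9F71;      // would be random per codegen
  const int32_t hostile = 0x41414141;     // attacker-chosen constant
  assert((hostile ^ cookie) != hostile);  // emitted bytes differ from input
  assert(LoadMaskedConstant(hostile, cookie) == hostile);
  return 0;
}
```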