| OLD | NEW |
| (Empty) |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | |
| 2 // Redistribution and use in source and binary forms, with or without | |
| 3 // modification, are permitted provided that the following conditions are | |
| 4 // met: | |
| 5 // | |
| 6 // * Redistributions of source code must retain the above copyright | |
| 7 // notice, this list of conditions and the following disclaimer. | |
| 8 // * Redistributions in binary form must reproduce the above | |
| 9 // copyright notice, this list of conditions and the following | |
| 10 // disclaimer in the documentation and/or other materials provided | |
| 11 // with the distribution. | |
| 12 // * Neither the name of Google Inc. nor the names of its | |
| 13 // contributors may be used to endorse or promote products derived | |
| 14 // from this software without specific prior written permission. | |
| 15 // | |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 27 | |
| 28 #ifndef V8_CODEGEN_ARM_H_ | |
| 29 #define V8_CODEGEN_ARM_H_ | |
| 30 | |
| 31 namespace v8 { namespace internal { | |
| 32 | |
| 33 // Forward declarations | |
| 34 class DeferredCode; | |
| 35 class RegisterAllocator; | |
| 36 class RegisterFile; | |
| 37 | |
| 38 enum InitState { CONST_INIT, NOT_CONST_INIT }; | |
| 39 enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF }; | |
| 40 | |
| 41 | |
| 42 // ------------------------------------------------------------------------- | |
| 43 // Reference support | |
| 44 | |
| 45 // A reference is a C++ stack-allocated object that keeps an ECMA | |
| 46 // reference on the execution stack while in scope. For variables | |
| 47 // the reference is empty, indicating that it isn't necessary to | |
| 48 // store state on the stack to keep track of references to them. | |
| 49 // For properties, we keep either one (named) or two (indexed) values | |
| 50 // on the execution stack to represent the reference. | |
| 51 | |
| 52 class Reference BASE_EMBEDDED { | |
| 53 public: | |
| 54 // The values of the types are important, see size(). | |
| 55 enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 }; | |
| 56 Reference(CodeGenerator* cgen, Expression* expression); | |
| 57 ~Reference(); | |
| 58 | |
| 59 Expression* expression() const { return expression_; } | |
| 60 Type type() const { return type_; } | |
| 61 void set_type(Type value) { | |
| 62 ASSERT(type_ == ILLEGAL); | |
| 63 type_ = value; | |
| 64 } | |
| 65 | |
| 66 // The size the reference takes up on the stack. | |
| 67 int size() const { return (type_ == ILLEGAL) ? 0 : type_; } | |
| 68 | |
| 69 bool is_illegal() const { return type_ == ILLEGAL; } | |
| 70 bool is_slot() const { return type_ == SLOT; } | |
| 71 bool is_property() const { return type_ == NAMED || type_ == KEYED; } | |
| 72 | |
| 73 // Return the name. Only valid for named property references. | |
| 74 Handle<String> GetName(); | |
| 75 | |
| 76 // Generate code to push the value of the reference on top of the | |
| 77 // expression stack. The reference is expected to be already on top of | |
| 78 // the expression stack, and it is left in place with its value above it. | |
| 79 void GetValue(TypeofState typeof_state); | |
| 80 | |
| 81 // Generate code to push the value of a reference on top of the expression | |
| 82 // stack and then spill the stack frame. This function is used temporarily | |
| 83 // while the code generator is being transformed. | |
| 84 inline void GetValueAndSpill(TypeofState typeof_state); | |
| 85 | |
| 86 // Generate code to store the value on top of the expression stack in the | |
| 87 // reference. The reference is expected to be immediately below the value | |
| 88 // on the expression stack. The stored value is left in place (with the | |
| 89 // reference intact below it) to support chained assignments. | |
| 90 void SetValue(InitState init_state); | |
| 91 | |
| 92 private: | |
| 93 CodeGenerator* cgen_; | |
| 94 Expression* expression_; | |
| 95 Type type_; | |
| 96 }; | |
| 97 | |
| 98 | |
| 99 // ------------------------------------------------------------------------- | |
| 100 // Code generation state | |
| 101 | |
| 102 // The state is passed down the AST by the code generator (and back up, in | |
| 103 // the form of the state of the label pair). It is threaded through the | |
| 104 // call stack. Constructing a state implicitly pushes it on the owning code | |
| 105 // generator's stack of states, and destroying one implicitly pops it. | |
| 106 | |
| 107 class CodeGenState BASE_EMBEDDED { | |
| 108 public: | |
| 109 // Create an initial code generator state. Destroying the initial state | |
| 110 // leaves the code generator with a NULL state. | |
| 111 explicit CodeGenState(CodeGenerator* owner); | |
| 112 | |
| 113 // Create a code generator state based on a code generator's current | |
| 114 // state. The new state has its own typeof state and pair of branch | |
| 115 // labels. | |
| 116 CodeGenState(CodeGenerator* owner, | |
| 117 TypeofState typeof_state, | |
| 118 JumpTarget* true_target, | |
| 119 JumpTarget* false_target); | |
| 120 | |
| 121 // Destroy a code generator state and restore the owning code generator's | |
| 122 // previous state. | |
| 123 ~CodeGenState(); | |
| 124 | |
| 125 TypeofState typeof_state() const { return typeof_state_; } | |
| 126 JumpTarget* true_target() const { return true_target_; } | |
| 127 JumpTarget* false_target() const { return false_target_; } | |
| 128 | |
| 129 private: | |
| 130 CodeGenerator* owner_; | |
| 131 TypeofState typeof_state_; | |
| 132 JumpTarget* true_target_; | |
| 133 JumpTarget* false_target_; | |
| 134 CodeGenState* previous_; | |
| 135 }; | |
| 136 | |
| 137 | |
| 138 // ------------------------------------------------------------------------- | |
| 139 // CodeGenerator | |
| 140 | |
| 141 class CodeGenerator: public AstVisitor { | |
| 142 public: | |
| 143 // Takes a function literal and generates code for it. This function should only | |
| 144 // be called by compiler.cc. | |
| 145 static Handle<Code> MakeCode(FunctionLiteral* fun, | |
| 146 Handle<Script> script, | |
| 147 bool is_eval); | |
| 148 | |
| 149 #ifdef ENABLE_LOGGING_AND_PROFILING | |
| 150 static bool ShouldGenerateLog(Expression* type); | |
| 151 #endif | |
| 152 | |
| 153 static void SetFunctionInfo(Handle<JSFunction> fun, | |
| 154 int length, | |
| 155 int function_token_position, | |
| 156 int start_position, | |
| 157 int end_position, | |
| 158 bool is_expression, | |
| 159 bool is_toplevel, | |
| 160 Handle<Script> script, | |
| 161 Handle<String> inferred_name); | |
| 162 | |
| 163 // Accessors | |
| 164 MacroAssembler* masm() { return masm_; } | |
| 165 | |
| 166 VirtualFrame* frame() const { return frame_; } | |
| 167 | |
| 168 bool has_valid_frame() const { return frame_ != NULL; } | |
| 169 | |
| 170 // Set the virtual frame to be new_frame, with non-frame register | |
| 171 // reference counts given by non_frame_registers. The non-frame | |
| 172 // register reference counts of the old frame are returned in | |
| 173 // non_frame_registers. | |
| 174 void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers); | |
| 175 | |
| 176 void DeleteFrame(); | |
| 177 | |
| 178 RegisterAllocator* allocator() const { return allocator_; } | |
| 179 | |
| 180 CodeGenState* state() { return state_; } | |
| 181 void set_state(CodeGenState* state) { state_ = state; } | |
| 182 | |
| 183 void AddDeferred(DeferredCode* code) { deferred_.Add(code); } | |
| 184 | |
| 185 bool in_spilled_code() const { return in_spilled_code_; } | |
| 186 void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; } | |
| 187 | |
| 188 private: | |
| 189 // Construction/Destruction | |
| 190 CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval); | |
| 191 virtual ~CodeGenerator() { delete masm_; } | |
| 192 | |
| 193 // Accessors | |
| 194 Scope* scope() const { return scope_; } | |
| 195 | |
| 196 // Clearing and generating deferred code. | |
| 197 void ClearDeferred(); | |
| 198 void ProcessDeferred(); | |
| 199 | |
| 200 bool is_eval() { return is_eval_; } | |
| 201 | |
| 202 // State | |
| 203 bool has_cc() const { return cc_reg_ != al; } | |
| 204 TypeofState typeof_state() const { return state_->typeof_state(); } | |
| 205 JumpTarget* true_target() const { return state_->true_target(); } | |
| 206 JumpTarget* false_target() const { return state_->false_target(); } | |
| 207 | |
| 208 | |
| 209 // Node visitors. | |
| 210 void VisitStatements(ZoneList<Statement*>* statements); | |
| 211 | |
| 212 #define DEF_VISIT(type) \ | |
| 213 void Visit##type(type* node); | |
| 214 NODE_LIST(DEF_VISIT) | |
| 215 #undef DEF_VISIT | |
| 216 | |
| 217 // Visit a statement and then spill the virtual frame if control flow can | |
| 218 // reach the end of the statement (ie, it does not exit via break, | |
| 219 // continue, return, or throw). This function is used temporarily while | |
| 220 // the code generator is being transformed. | |
| 221 void VisitAndSpill(Statement* statement); | |
| 222 | |
| 223 // Visit a list of statements and then spill the virtual frame if control | |
| 224 // flow can reach the end of the list. | |
| 225 void VisitStatementsAndSpill(ZoneList<Statement*>* statements); | |
| 226 | |
| 227 // Main code generation function | |
| 228 void GenCode(FunctionLiteral* fun); | |
| 229 | |
| 230 // The following are used by class Reference. | |
| 231 void LoadReference(Reference* ref); | |
| 232 void UnloadReference(Reference* ref); | |
| 233 | |
| 234 MemOperand ContextOperand(Register context, int index) const { | |
| 235 return MemOperand(context, Context::SlotOffset(index)); | |
| 236 } | |
| 237 | |
| 238 MemOperand SlotOperand(Slot* slot, Register tmp); | |
| 239 | |
| 240 MemOperand ContextSlotOperandCheckExtensions(Slot* slot, | |
| 241 Register tmp, | |
| 242 Register tmp2, | |
| 243 JumpTarget* slow); | |
| 244 | |
| 245 // Expressions | |
| 246 MemOperand GlobalObject() const { | |
| 247 return ContextOperand(cp, Context::GLOBAL_INDEX); | |
| 248 } | |
| 249 | |
| 250 void LoadCondition(Expression* x, | |
| 251 TypeofState typeof_state, | |
| 252 JumpTarget* true_target, | |
| 253 JumpTarget* false_target, | |
| 254 bool force_cc); | |
| 255 void Load(Expression* x, TypeofState typeof_state = NOT_INSIDE_TYPEOF); | |
| 256 void LoadGlobal(); | |
| 257 void LoadGlobalReceiver(Register scratch); | |
| 258 | |
| 259 // Generate code to push the value of an expression on top of the frame | |
| 260 // and then spill the frame fully to memory. This function is used | |
| 261 // temporarily while the code generator is being transformed. | |
| 262 void LoadAndSpill(Expression* expression, | |
| 263 TypeofState typeof_state = NOT_INSIDE_TYPEOF); | |
| 264 | |
| 265 // Call LoadCondition and then spill the virtual frame unless control flow | |
| 266 // cannot reach the end of the expression (ie, by emitting only | |
| 267 // unconditional jumps to the control targets). | |
| 268 void LoadConditionAndSpill(Expression* expression, | |
| 269 TypeofState typeof_state, | |
| 270 JumpTarget* true_target, | |
| 271 JumpTarget* false_target, | |
| 272 bool force_control); | |
| 273 | |
| 274 // Read a value from a slot and leave it on top of the expression stack. | |
| 275 void LoadFromSlot(Slot* slot, TypeofState typeof_state); | |
| 276 void LoadFromGlobalSlotCheckExtensions(Slot* slot, | |
| 277 TypeofState typeof_state, | |
| 278 Register tmp, | |
| 279 Register tmp2, | |
| 280 JumpTarget* slow); | |
| 281 | |
| 282 // Special code for typeof expressions: Unfortunately, we must | |
| 283 // be careful when loading the expression in 'typeof' | |
| 284 // expressions. We are not allowed to throw reference errors for | |
| 285 // non-existing properties of the global object, so we must make it | |
| 286 // look like an explicit property access, instead of an access | |
| 287 // through the context chain. | |
| 288 void LoadTypeofExpression(Expression* x); | |
| 289 | |
| 290 void ToBoolean(JumpTarget* true_target, JumpTarget* false_target); | |
| 291 | |
| 292 void GenericBinaryOperation(Token::Value op, OverwriteMode overwrite_mode); | |
| 293 void Comparison(Condition cc, bool strict = false); | |
| 294 | |
| 295 void SmiOperation(Token::Value op, | |
| 296 Handle<Object> value, | |
| 297 bool reversed, | |
| 298 OverwriteMode mode); | |
| 299 | |
| 300 void CallWithArguments(ZoneList<Expression*>* arguments, int position); | |
| 301 | |
| 302 // Control flow | |
| 303 void Branch(bool if_true, JumpTarget* target); | |
| 304 void CheckStack(); | |
| 305 | |
| 306 bool CheckForInlineRuntimeCall(CallRuntime* node); | |
| 307 Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node); | |
| 308 void ProcessDeclarations(ZoneList<Declaration*>* declarations); | |
| 309 | |
| 310 Handle<Code> ComputeCallInitialize(int argc); | |
| 311 Handle<Code> ComputeCallInitializeInLoop(int argc); | |
| 312 | |
| 313 // Declare global variables and functions in the given array of | |
| 314 // name/value pairs. | |
| 315 void DeclareGlobals(Handle<FixedArray> pairs); | |
| 316 | |
| 317 // Instantiate the function boilerplate. | |
| 318 void InstantiateBoilerplate(Handle<JSFunction> boilerplate); | |
| 319 | |
| 320 // Support for type checks. | |
| 321 void GenerateIsSmi(ZoneList<Expression*>* args); | |
| 322 void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args); | |
| 323 void GenerateIsArray(ZoneList<Expression*>* args); | |
| 324 | |
| 325 // Support for arguments.length and arguments[?]. | |
| 326 void GenerateArgumentsLength(ZoneList<Expression*>* args); | |
| 327 void GenerateArgumentsAccess(ZoneList<Expression*>* args); | |
| 328 | |
| 329 // Support for accessing the value field of an object (used by Date). | |
| 330 void GenerateValueOf(ZoneList<Expression*>* args); | |
| 331 void GenerateSetValueOf(ZoneList<Expression*>* args); | |
| 332 | |
| 333 // Fast support for charCodeAt(n). | |
| 334 void GenerateFastCharCodeAt(ZoneList<Expression*>* args); | |
| 335 | |
| 336 // Fast support for object equality testing. | |
| 337 void GenerateObjectEquals(ZoneList<Expression*>* args); | |
| 338 | |
| 339 void GenerateLog(ZoneList<Expression*>* args); | |
| 340 | |
| 341 // Methods and constants for fast case switch statement support. | |
| 342 // | |
| 343 // Only allow fast-case switch if the range of labels is at most | |
| 344 // this factor times the number of case labels. | |
| 345 // Value is derived from comparing the size of code generated by the normal | |
| 346 // switch code for Smi-labels to the size of a single pointer. If code | |
| 347 // quality increases, this number should be decreased to match. | |
| 348 static const int kFastSwitchMaxOverheadFactor = 10; | |
| 349 | |
| 350 // Minimal number of switch cases required before we allow jump-table | |
| 351 // optimization. | |
| 352 static const int kFastSwitchMinCaseCount = 5; | |
| 353 | |
| 354 // The limit of the range of a fast-case switch, as a factor of the number | |
| 355 // of cases of the switch. Each platform should return a value that | |
| 356 // is optimal compared to the default code generated for a switch statement | |
| 357 // on that platform. | |
| 358 int FastCaseSwitchMaxOverheadFactor(); | |
| 359 | |
| 360 // The minimal number of cases in a switch before the fast-case switch | |
| 361 // optimization is enabled. Each platform should return a value that | |
| 362 // is optimal compared to the default code generated for a switch statement | |
| 363 // on that platform. | |
| 364 int FastCaseSwitchMinCaseCount(); | |
| 365 | |
| 366 // Allocate a jump table and create code to jump through it. | |
| 367 // Should call GenerateFastCaseSwitchCases to generate the code for | |
| 368 // all the cases at the appropriate point. | |
| 369 void GenerateFastCaseSwitchJumpTable(SwitchStatement* node, | |
| 370 int min_index, | |
| 371 int range, | |
| 372 Label* default_label, | |
| 373 Vector<Label*> case_targets, | |
| 374 Vector<Label> case_labels); | |
| 375 | |
| 376 // Generate the code for cases for the fast case switch. | |
| 377 // Called by GenerateFastCaseSwitchJumpTable. | |
| 378 void GenerateFastCaseSwitchCases(SwitchStatement* node, | |
| 379 Vector<Label> case_labels, | |
| 380 VirtualFrame* start_frame); | |
| 381 | |
| 382 // Fast support for constant-Smi switches. | |
| 383 void GenerateFastCaseSwitchStatement(SwitchStatement* node, | |
| 384 int min_index, | |
| 385 int range, | |
| 386 int default_index); | |
| 387 | |
| 388 // Fast support for constant-Smi switches. Tests whether switch statement | |
| 389 // permits optimization and calls GenerateFastCaseSwitch if it does. | |
| 390 // Returns true if the fast-case switch was generated, and false if not. | |
| 391 bool TryGenerateFastCaseSwitchStatement(SwitchStatement* node); | |
| 392 | |
| 393 | |
| 394 // Methods used to indicate which source position code is generated for. Source | |
| 395 // positions are collected by the assembler and emitted with the relocation | |
| 396 // information. | |
| 397 void CodeForFunctionPosition(FunctionLiteral* fun); | |
| 398 void CodeForReturnPosition(FunctionLiteral* fun); | |
| 399 void CodeForStatementPosition(Node* node); | |
| 400 void CodeForSourcePosition(int pos); | |
| 401 | |
| 402 #ifdef DEBUG | |
| 403 // True if the registers are valid for entry to a block. | |
| 404 bool HasValidEntryRegisters(); | |
| 405 #endif | |
| 406 | |
| 407 bool is_eval_; // Tells whether code is generated for eval. | |
| 408 | |
| 409 Handle<Script> script_; | |
| 410 List<DeferredCode*> deferred_; | |
| 411 | |
| 412 // Assembler | |
| 413 MacroAssembler* masm_; // to generate code | |
| 414 | |
| 415 // Code generation state | |
| 416 Scope* scope_; | |
| 417 VirtualFrame* frame_; | |
| 418 RegisterAllocator* allocator_; | |
| 419 Condition cc_reg_; | |
| 420 CodeGenState* state_; | |
| 421 | |
| 422 // Jump targets | |
| 423 BreakTarget function_return_; | |
| 424 | |
| 425 // True if the function return is shadowed (ie, jumping to the target | |
| 426 // function_return_ does not jump to the true function return, but rather | |
| 427 // to some unlinking code). | |
| 428 bool function_return_is_shadowed_; | |
| 429 | |
| 430 // True when we are in code that expects the virtual frame to be fully | |
| 431 // spilled. Some virtual frame functions are disabled in DEBUG builds when | |
| 432 // called from spilled code, because they do not leave the virtual frame | |
| 433 // in a spilled state. | |
| 434 bool in_spilled_code_; | |
| 435 | |
| 436 friend class VirtualFrame; | |
| 437 friend class JumpTarget; | |
| 438 friend class Reference; | |
| 439 | |
| 440 DISALLOW_COPY_AND_ASSIGN(CodeGenerator); | |
| 441 }; | |
| 442 | |
| 443 | |
| 444 } } // namespace v8::internal | |
| 445 | |
| 446 #endif // V8_CODEGEN_ARM_H_ | |
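The Reference comment near the top of this header describes a stack protocol rather than a complete API, so a short worked example may help. The sketch below is not code from codegen-arm.cc; it is a minimal illustration, assuming that an Assignment node exposes `target()` and `value()` accessors and that Reference's constructor and destructor call the LoadReference()/UnloadReference() helpers the header says are "used by class Reference".

```cpp
// Hypothetical sketch of how an assignment visitor might combine the
// declarations above when compiling something like "obj.x = y".
void CodeGenerator::VisitAssignment(Assignment* node) {
  // Stack-allocating the Reference pushes whatever state it needs (a name
  // for NAMED, a receiver and key for KEYED) onto the virtual frame; the
  // destructor pops that state again when the Reference goes out of scope.
  Reference target(this, node->target());
  if (target.is_illegal()) return;

  // Evaluate the right-hand side.  Its value ends up on top of the frame,
  // directly above the reference state pushed by the constructor.
  Load(node->value());

  // Store the value through the reference.  Per the SetValue() comment, the
  // stored value is left on top of the frame so that chained assignments
  // (a = b = c) keep working.
  target.SetValue(NOT_CONST_INIT);
}  // ~Reference() unloads the reference here.
```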
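The two switch constants (kFastSwitchMinCaseCount = 5, kFastSwitchMaxOverheadFactor = 10) only make sense together, so here is a hedged sketch of the size/density test they imply. The real TryGenerateFastCaseSwitchStatement also has to walk the case clauses and give up unless every label is a constant Smi; the helper name and its parameters below are illustrative, not part of the header.

```cpp
// Sketch of the jump-table profitability check suggested by the comments on
// kFastSwitchMinCaseCount and kFastSwitchMaxOverheadFactor.
static bool FastCaseSwitchLooksProfitable(int case_count,
                                          int min_label,
                                          int max_label) {
  // Too few cases: a jump table will not beat the normal sequence of
  // compares and branches.
  if (case_count < 5 /* kFastSwitchMinCaseCount */) return false;

  // The jump table needs one entry for every value in [min_label, max_label].
  // Only build it if the table is at most kFastSwitchMaxOverheadFactor times
  // as large as the number of actual cases, i.e. reasonably dense.
  int range = max_label - min_label + 1;
  return range <= 10 /* kFastSwitchMaxOverheadFactor */ * case_count;
}
```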