| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #ifndef V8_FULL_CODEGEN_H_ | |
| 6 #define V8_FULL_CODEGEN_H_ | |
| 7 | |
| 8 #include "src/v8.h" | |
| 9 | |
| 10 #include "src/allocation.h" | |
| 11 #include "src/assert-scope.h" | |
| 12 #include "src/ast.h" | |
| 13 #include "src/bit-vector.h" | |
| 14 #include "src/code-stubs.h" | |
| 15 #include "src/codegen.h" | |
| 16 #include "src/compiler.h" | |
| 17 #include "src/globals.h" | |
| 18 #include "src/objects.h" | |
| 19 #include "src/scopes.h" | |
| 20 | |
| 21 namespace v8 { | |
| 22 namespace internal { | |
| 23 | |
| 24 // Forward declarations. | |
| 25 class JumpPatchSite; | |
| 26 | |
| 27 // ----------------------------------------------------------------------------- | |
| 28 // Full code generator. | |
| 29 | |
| 30 class FullCodeGenerator: public AstVisitor { | |
| 31 public: | |
| 32 enum State { | |
| 33 NO_REGISTERS, | |
| 34 TOS_REG | |
| 35 }; | |
| 36 | |
| 37 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info) | |
| 38 : masm_(masm), | |
| 39 info_(info), | |
| 40 scope_(info->scope()), | |
| 41 nesting_stack_(NULL), | |
| 42 loop_depth_(0), | |
| 43 try_catch_depth_(0), | |
| 44 globals_(NULL), | |
| 45 context_(NULL), | |
| 46 bailout_entries_(info->HasDeoptimizationSupport() | |
| 47 ? info->function()->ast_node_count() | |
| 48 : 0, | |
| 49 info->zone()), | |
| 50 back_edges_(2, info->zone()), | |
| 51 handler_table_(info->zone()), | |
| 52 ic_total_count_(0) { | |
| 53 DCHECK(!info->IsStub()); | |
| 54 Initialize(); | |
| 55 } | |
| 56 | |
| 57 void Initialize(); | |
| 58 | |
| 59 static bool MakeCode(CompilationInfo* info); | |
| 60 | |
| 61 // Encode state and pc-offset as a BitField<type, start, size>. | |
| 62 // Only use 30 bits because we encode the result as a smi. | |
| 63 class StateField : public BitField<State, 0, 1> { }; | |
| 64 class PcField : public BitField<unsigned, 1, 30-1> { }; | |
| 65 | |
| 66 static const char* State2String(State state) { | |
| 67 switch (state) { | |
| 68 case NO_REGISTERS: return "NO_REGISTERS"; | |
| 69 case TOS_REG: return "TOS_REG"; | |
| 70 } | |
| 71 UNREACHABLE(); | |
| 72 return NULL; | |
| 73 } | |
| 74 | |
| 75 static const int kMaxBackEdgeWeight = 127; | |
| 76 | |
| 77 // Platform-specific code size multiplier. | |
| 78 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87 | |
| 79 static const int kCodeSizeMultiplier = 105; | |
| 80 #elif V8_TARGET_ARCH_X64 | |
| 81 static const int kCodeSizeMultiplier = 170; | |
| 82 #elif V8_TARGET_ARCH_ARM | |
| 83 static const int kCodeSizeMultiplier = 149; | |
| 84 #elif V8_TARGET_ARCH_ARM64 | |
| 85 // TODO(all): Copied ARM value. Check this is sensible for ARM64. | |
| 86 static const int kCodeSizeMultiplier = 149; | |
| 87 #elif V8_TARGET_ARCH_PPC64 | |
| 88 static const int kCodeSizeMultiplier = 200; | |
| 89 #elif V8_TARGET_ARCH_PPC | |
| 90 static const int kCodeSizeMultiplier = 200; | |
| 91 #elif V8_TARGET_ARCH_MIPS | |
| 92 static const int kCodeSizeMultiplier = 149; | |
| 93 #elif V8_TARGET_ARCH_MIPS64 | |
| 94 static const int kCodeSizeMultiplier = 149; | |
| 95 #else | |
| 96 #error Unsupported target architecture. | |
| 97 #endif | |
| 98 | |
| 99 private: | |
| 100 class Breakable; | |
| 101 class Iteration; | |
| 102 | |
| 103 class TestContext; | |
| 104 | |
| 105 class NestedStatement BASE_EMBEDDED { | |
| 106 public: | |
| 107 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) { | |
| 108 // Link into codegen's nesting stack. | |
| 109 previous_ = codegen->nesting_stack_; | |
| 110 codegen->nesting_stack_ = this; | |
| 111 } | |
| 112 virtual ~NestedStatement() { | |
| 113 // Unlink from codegen's nesting stack. | |
| 114 DCHECK_EQ(this, codegen_->nesting_stack_); | |
| 115 codegen_->nesting_stack_ = previous_; | |
| 116 } | |
| 117 | |
| 118 virtual Breakable* AsBreakable() { return NULL; } | |
| 119 virtual Iteration* AsIteration() { return NULL; } | |
| 120 | |
| 121 virtual bool IsContinueTarget(Statement* target) { return false; } | |
| 122 virtual bool IsBreakTarget(Statement* target) { return false; } | |
| 123 | |
| 124 // Notify the statement that we are exiting it via break, continue, or | |
| 125 // return and give it a chance to generate cleanup code. Return the | |
| 126 // next outer statement in the nesting stack. We accumulate in | |
| 127 // *stack_depth the amount to drop the stack and in *context_length the | |
| 128 // number of context chain links to unwind as we traverse the nesting | |
| 129 // stack from an exit to its target. | |
| 130 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 131 return previous_; | |
| 132 } | |
| 133 | |
| 134 // Like the Exit() method above, but limited to accumulating stack depth. | |
| 135 virtual NestedStatement* AccumulateDepth(int* stack_depth) { | |
| 136 return previous_; | |
| 137 } | |
| 138 | |
| 139 protected: | |
| 140 MacroAssembler* masm() { return codegen_->masm(); } | |
| 141 | |
| 142 FullCodeGenerator* codegen_; | |
| 143 NestedStatement* previous_; | |
| 144 | |
| 145 private: | |
| 146 DISALLOW_COPY_AND_ASSIGN(NestedStatement); | |
| 147 }; | |
| 148 | |
| 149 // A breakable statement such as a block. | |
| 150 class Breakable : public NestedStatement { | |
| 151 public: | |
| 152 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement) | |
| 153 : NestedStatement(codegen), statement_(statement) { | |
| 154 } | |
| 155 virtual ~Breakable() {} | |
| 156 | |
| 157 virtual Breakable* AsBreakable() { return this; } | |
| 158 virtual bool IsBreakTarget(Statement* target) { | |
| 159 return statement() == target; | |
| 160 } | |
| 161 | |
| 162 BreakableStatement* statement() { return statement_; } | |
| 163 Label* break_label() { return &break_label_; } | |
| 164 | |
| 165 private: | |
| 166 BreakableStatement* statement_; | |
| 167 Label break_label_; | |
| 168 }; | |
| 169 | |
| 170 // An iteration statement such as a while, for, or do loop. | |
| 171 class Iteration : public Breakable { | |
| 172 public: | |
| 173 Iteration(FullCodeGenerator* codegen, IterationStatement* statement) | |
| 174 : Breakable(codegen, statement) { | |
| 175 } | |
| 176 virtual ~Iteration() {} | |
| 177 | |
| 178 virtual Iteration* AsIteration() { return this; } | |
| 179 virtual bool IsContinueTarget(Statement* target) { | |
| 180 return statement() == target; | |
| 181 } | |
| 182 | |
| 183 Label* continue_label() { return &continue_label_; } | |
| 184 | |
| 185 private: | |
| 186 Label continue_label_; | |
| 187 }; | |
| 188 | |
| 189 // A nested block statement. | |
| 190 class NestedBlock : public Breakable { | |
| 191 public: | |
| 192 NestedBlock(FullCodeGenerator* codegen, Block* block) | |
| 193 : Breakable(codegen, block) { | |
| 194 } | |
| 195 virtual ~NestedBlock() {} | |
| 196 | |
| 197 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 198 auto block_scope = statement()->AsBlock()->scope(); | |
| 199 if (block_scope != nullptr) { | |
| 200 if (block_scope->ContextLocalCount() > 0) ++(*context_length); | |
| 201 } | |
| 202 return previous_; | |
| 203 } | |
| 204 }; | |
| 205 | |
| 206 // The try block of a try/catch statement. | |
| 207 class TryCatch : public NestedStatement { | |
| 208 public: | |
| 209 static const int kElementCount = TryBlockConstant::kElementCount; | |
| 210 | |
| 211 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {} | |
| 212 virtual ~TryCatch() {} | |
| 213 | |
| 214 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 215 *stack_depth += kElementCount; | |
| 216 return previous_; | |
| 217 } | |
| 218 virtual NestedStatement* AccumulateDepth(int* stack_depth) { | |
| 219 *stack_depth += kElementCount; | |
| 220 return previous_; | |
| 221 } | |
| 222 }; | |
| 223 | |
| 224 // The try block of a try/finally statement. | |
| 225 class TryFinally : public NestedStatement { | |
| 226 public: | |
| 227 static const int kElementCount = TryBlockConstant::kElementCount; | |
| 228 | |
| 229 TryFinally(FullCodeGenerator* codegen, Label* finally_entry) | |
| 230 : NestedStatement(codegen), finally_entry_(finally_entry) { | |
| 231 } | |
| 232 virtual ~TryFinally() {} | |
| 233 | |
| 234 virtual NestedStatement* Exit(int* stack_depth, int* context_length); | |
| 235 virtual NestedStatement* AccumulateDepth(int* stack_depth) { | |
| 236 *stack_depth += kElementCount; | |
| 237 return previous_; | |
| 238 } | |
| 239 | |
| 240 private: | |
| 241 Label* finally_entry_; | |
| 242 }; | |
| 243 | |
| 244 // The finally block of a try/finally statement. | |
| 245 class Finally : public NestedStatement { | |
| 246 public: | |
| 247 static const int kElementCount = 3; | |
| 248 | |
| 249 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) {} | |
| 250 virtual ~Finally() {} | |
| 251 | |
| 252 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 253 *stack_depth += kElementCount; | |
| 254 return previous_; | |
| 255 } | |
| 256 virtual NestedStatement* AccumulateDepth(int* stack_depth) { | |
| 257 *stack_depth += kElementCount; | |
| 258 return previous_; | |
| 259 } | |
| 260 }; | |
| 261 | |
| 262 // The body of a for/in loop. | |
| 263 class ForIn : public Iteration { | |
| 264 public: | |
| 265 static const int kElementCount = 5; | |
| 266 | |
| 267 ForIn(FullCodeGenerator* codegen, ForInStatement* statement) | |
| 268 : Iteration(codegen, statement) { | |
| 269 } | |
| 270 virtual ~ForIn() {} | |
| 271 | |
| 272 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 273 *stack_depth += kElementCount; | |
| 274 return previous_; | |
| 275 } | |
| 276 virtual NestedStatement* AccumulateDepth(int* stack_depth) { | |
| 277 *stack_depth += kElementCount; | |
| 278 return previous_; | |
| 279 } | |
| 280 }; | |
| 281 | |
| 282 | |
| 283 // The body of a with or catch. | |
| 284 class WithOrCatch : public NestedStatement { | |
| 285 public: | |
| 286 explicit WithOrCatch(FullCodeGenerator* codegen) | |
| 287 : NestedStatement(codegen) { | |
| 288 } | |
| 289 virtual ~WithOrCatch() {} | |
| 290 | |
| 291 virtual NestedStatement* Exit(int* stack_depth, int* context_length) { | |
| 292 ++(*context_length); | |
| 293 return previous_; | |
| 294 } | |
| 295 }; | |
| 296 | |
| 297 // A platform-specific utility to overwrite the accumulator register | |
| 298 // with a GC-safe value. | |
| 299 void ClearAccumulator(); | |
| 300 | |
| 301 // Determine whether or not to inline the smi case for the given | |
| 302 // operation. | |
| 303 bool ShouldInlineSmiCase(Token::Value op); | |
| 304 | |
| 305 // Helper function to convert a pure value into a test context. The value | |
| 306 // is expected on the stack or the accumulator, depending on the platform. | |
| 307 // See the platform-specific implementation for details. | |
| 308 void DoTest(Expression* condition, | |
| 309 Label* if_true, | |
| 310 Label* if_false, | |
| 311 Label* fall_through); | |
| 312 void DoTest(const TestContext* context); | |
| 313 | |
| 314 // Helper function to split control flow and avoid a branch to the | |
| 315 // fall-through label if it is set up. | |
| 316 #if V8_TARGET_ARCH_MIPS | |
| 317 void Split(Condition cc, | |
| 318 Register lhs, | |
| 319 const Operand& rhs, | |
| 320 Label* if_true, | |
| 321 Label* if_false, | |
| 322 Label* fall_through); | |
| 323 #elif V8_TARGET_ARCH_MIPS64 | |
| 324 void Split(Condition cc, | |
| 325 Register lhs, | |
| 326 const Operand& rhs, | |
| 327 Label* if_true, | |
| 328 Label* if_false, | |
| 329 Label* fall_through); | |
| 330 #elif V8_TARGET_ARCH_PPC | |
| 331 void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through, | |
| 332 CRegister cr = cr7); | |
| 333 #else // All other arch. | |
| 334 void Split(Condition cc, | |
| 335 Label* if_true, | |
| 336 Label* if_false, | |
| 337 Label* fall_through); | |
| 338 #endif | |
| 339 | |
| 340 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into | |
| 341 // a register. Emits a context chain walk if if necessary (so does | |
| 342 // SetVar) so avoid calling both on the same variable. | |
| 343 void GetVar(Register destination, Variable* var); | |
| 344 | |
| 345 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in | |
| 346 // the context, the write barrier will be emitted and source, scratch0, | |
| 347 // scratch1 will be clobbered. Emits a context chain walk if if necessary | |
| 348 // (so does GetVar) so avoid calling both on the same variable. | |
| 349 void SetVar(Variable* var, | |
| 350 Register source, | |
| 351 Register scratch0, | |
| 352 Register scratch1); | |
| 353 | |
| 354 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL) | |
| 355 // variable. Writing does not need the write barrier. | |
| 356 MemOperand StackOperand(Variable* var); | |
| 357 | |
| 358 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT) | |
| 359 // variable. May emit code to traverse the context chain, loading the | |
| 360 // found context into the scratch register. Writing to this operand will | |
| 361 // need the write barrier if location is CONTEXT. | |
| 362 MemOperand VarOperand(Variable* var, Register scratch); | |
| 363 | |
| 364 void VisitForEffect(Expression* expr) { | |
| 365 EffectContext context(this); | |
| 366 Visit(expr); | |
| 367 PrepareForBailout(expr, NO_REGISTERS); | |
| 368 } | |
| 369 | |
| 370 void VisitForAccumulatorValue(Expression* expr) { | |
| 371 AccumulatorValueContext context(this); | |
| 372 Visit(expr); | |
| 373 PrepareForBailout(expr, TOS_REG); | |
| 374 } | |
| 375 | |
| 376 void VisitForStackValue(Expression* expr) { | |
| 377 StackValueContext context(this); | |
| 378 Visit(expr); | |
| 379 PrepareForBailout(expr, NO_REGISTERS); | |
| 380 } | |
| 381 | |
| 382 void VisitForControl(Expression* expr, | |
| 383 Label* if_true, | |
| 384 Label* if_false, | |
| 385 Label* fall_through) { | |
| 386 TestContext context(this, expr, if_true, if_false, fall_through); | |
| 387 Visit(expr); | |
| 388 // For test contexts, we prepare for bailout before branching, not at | |
| 389 // the end of the entire expression. This happens as part of visiting | |
| 390 // the expression. | |
| 391 } | |
| 392 | |
| 393 void VisitInDuplicateContext(Expression* expr); | |
| 394 | |
| 395 void VisitDeclarations(ZoneList<Declaration*>* declarations) override; | |
| 396 void DeclareModules(Handle<FixedArray> descriptions); | |
| 397 void DeclareGlobals(Handle<FixedArray> pairs); | |
| 398 int DeclareGlobalsFlags(); | |
| 399 | |
| 400 // Generate code to create an iterator result object. The "value" property is | |
| 401 // set to a value popped from the stack, and "done" is set according to the | |
| 402 // argument. The result object is left in the result register. | |
| 403 void EmitCreateIteratorResult(bool done); | |
| 404 | |
| 405 // Try to perform a comparison as a fast inlined literal compare if | |
| 406 // the operands allow it. Returns true if the compare operations | |
| 407 // has been matched and all code generated; false otherwise. | |
| 408 bool TryLiteralCompare(CompareOperation* compare); | |
| 409 | |
| 410 // Platform-specific code for comparing the type of a value with | |
| 411 // a given literal string. | |
| 412 void EmitLiteralCompareTypeof(Expression* expr, | |
| 413 Expression* sub_expr, | |
| 414 Handle<String> check); | |
| 415 | |
| 416 // Platform-specific code for equality comparison with a nil-like value. | |
| 417 void EmitLiteralCompareNil(CompareOperation* expr, | |
| 418 Expression* sub_expr, | |
| 419 NilValue nil); | |
| 420 | |
| 421 // Bailout support. | |
| 422 void PrepareForBailout(Expression* node, State state); | |
| 423 void PrepareForBailoutForId(BailoutId id, State state); | |
| 424 | |
| 425 // Feedback slot support. The feedback vector will be cleared during gc and | |
| 426 // collected by the type-feedback oracle. | |
| 427 Handle<TypeFeedbackVector> FeedbackVector() const { | |
| 428 return info_->feedback_vector(); | |
| 429 } | |
| 430 void EnsureSlotContainsAllocationSite(FeedbackVectorSlot slot); | |
| 431 void EnsureSlotContainsAllocationSite(FeedbackVectorICSlot slot); | |
| 432 | |
| 433 // Returns a smi for the index into the FixedArray that backs the feedback | |
| 434 // vector | |
| 435 Smi* SmiFromSlot(FeedbackVectorSlot slot) const { | |
| 436 return Smi::FromInt(FeedbackVector()->GetIndex(slot)); | |
| 437 } | |
| 438 | |
| 439 Smi* SmiFromSlot(FeedbackVectorICSlot slot) const { | |
| 440 return Smi::FromInt(FeedbackVector()->GetIndex(slot)); | |
| 441 } | |
| 442 | |
| 443 // Record a call's return site offset, used to rebuild the frame if the | |
| 444 // called function was inlined at the site. | |
| 445 void RecordJSReturnSite(Call* call); | |
| 446 | |
| 447 // Prepare for bailout before a test (or compare) and branch. If | |
| 448 // should_normalize, then the following comparison will not handle the | |
| 449 // canonical JS true value so we will insert a (dead) test against true at | |
| 450 // the actual bailout target from the optimized code. If not | |
| 451 // should_normalize, the true and false labels are ignored. | |
| 452 void PrepareForBailoutBeforeSplit(Expression* expr, | |
| 453 bool should_normalize, | |
| 454 Label* if_true, | |
| 455 Label* if_false); | |
| 456 | |
| 457 // If enabled, emit debug code for checking that the current context is | |
| 458 // neither a with nor a catch context. | |
| 459 void EmitDebugCheckDeclarationContext(Variable* variable); | |
| 460 | |
| 461 // This is meant to be called at loop back edges, |back_edge_target| is | |
| 462 // the jump target of the back edge and is used to approximate the amount | |
| 463 // of code inside the loop. | |
| 464 void EmitBackEdgeBookkeeping(IterationStatement* stmt, | |
| 465 Label* back_edge_target); | |
| 466 // Record the OSR AST id corresponding to a back edge in the code. | |
| 467 void RecordBackEdge(BailoutId osr_ast_id); | |
| 468 // Emit a table of back edge ids, pcs and loop depths into the code stream. | |
| 469 // Return the offset of the start of the table. | |
| 470 unsigned EmitBackEdgeTable(); | |
| 471 | |
| 472 void EmitProfilingCounterDecrement(int delta); | |
| 473 void EmitProfilingCounterReset(); | |
| 474 | |
| 475 // Emit code to pop values from the stack associated with nested statements | |
| 476 // like try/catch, try/finally, etc, running the finallies and unwinding the | |
| 477 // handlers as needed. | |
| 478 void EmitUnwindBeforeReturn(); | |
| 479 | |
| 480 // Platform-specific return sequence | |
| 481 void EmitReturnSequence(); | |
| 482 | |
| 483 // Platform-specific code sequences for calls | |
| 484 void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION); | |
| 485 void EmitSuperConstructorCall(Call* expr); | |
| 486 void EmitCallWithLoadIC(Call* expr); | |
| 487 void EmitSuperCallWithLoadIC(Call* expr); | |
| 488 void EmitKeyedCallWithLoadIC(Call* expr, Expression* key); | |
| 489 void EmitKeyedSuperCallWithLoadIC(Call* expr); | |
| 490 | |
| 491 #define FOR_EACH_FULL_CODE_INTRINSIC(F) \ | |
| 492 F(IsSmi) \ | |
| 493 F(IsNonNegativeSmi) \ | |
| 494 F(IsArray) \ | |
| 495 F(IsTypedArray) \ | |
| 496 F(IsRegExp) \ | |
| 497 F(IsJSProxy) \ | |
| 498 F(IsConstructCall) \ | |
| 499 F(CallFunction) \ | |
| 500 F(DefaultConstructorCallSuper) \ | |
| 501 F(ArgumentsLength) \ | |
| 502 F(Arguments) \ | |
| 503 F(ValueOf) \ | |
| 504 F(SetValueOf) \ | |
| 505 F(IsDate) \ | |
| 506 F(DateField) \ | |
| 507 F(StringCharFromCode) \ | |
| 508 F(StringCharAt) \ | |
| 509 F(OneByteSeqStringSetChar) \ | |
| 510 F(TwoByteSeqStringSetChar) \ | |
| 511 F(ObjectEquals) \ | |
| 512 F(IsObject) \ | |
| 513 F(IsFunction) \ | |
| 514 F(IsUndetectableObject) \ | |
| 515 F(IsSpecObject) \ | |
| 516 F(IsStringWrapperSafeForDefaultValueOf) \ | |
| 517 F(MathPow) \ | |
| 518 F(IsMinusZero) \ | |
| 519 F(HasCachedArrayIndex) \ | |
| 520 F(GetCachedArrayIndex) \ | |
| 521 F(FastOneByteArrayJoin) \ | |
| 522 F(GeneratorNext) \ | |
| 523 F(GeneratorThrow) \ | |
| 524 F(DebugBreakInOptimizedCode) \ | |
| 525 F(ClassOf) \ | |
| 526 F(StringCharCodeAt) \ | |
| 527 F(StringAdd) \ | |
| 528 F(SubString) \ | |
| 529 F(StringCompare) \ | |
| 530 F(RegExpExec) \ | |
| 531 F(RegExpConstructResult) \ | |
| 532 F(GetFromCache) \ | |
| 533 F(NumberToString) \ | |
| 534 F(DebugIsActive) | |
| 535 | |
| 536 #define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call); | |
| 537 FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION) | |
| 538 #undef GENERATOR_DECLARATION | |
| 539 | |
| 540 // Platform-specific code for resuming generators. | |
| 541 void EmitGeneratorResume(Expression *generator, | |
| 542 Expression *value, | |
| 543 JSGeneratorObject::ResumeMode resume_mode); | |
| 544 | |
| 545 // Platform-specific code for loading variables. | |
| 546 void EmitLoadGlobalCheckExtensions(VariableProxy* proxy, | |
| 547 TypeofMode typeof_mode, Label* slow); | |
| 548 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow); | |
| 549 void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode, | |
| 550 Label* slow, Label* done); | |
| 551 void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode); | |
| 552 void EmitVariableLoad(VariableProxy* proxy, | |
| 553 TypeofMode typeof_mode = NOT_INSIDE_TYPEOF); | |
| 554 | |
| 555 void EmitAccessor(Expression* expression); | |
| 556 | |
| 557 // Expects the arguments and the function already pushed. | |
| 558 void EmitResolvePossiblyDirectEval(int arg_count); | |
| 559 | |
| 560 // Platform-specific support for allocating a new closure based on | |
| 561 // the given function info. | |
| 562 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure); | |
| 563 | |
| 564 // Re-usable portions of CallRuntime | |
| 565 void EmitLoadJSRuntimeFunction(CallRuntime* expr); | |
| 566 void EmitCallJSRuntimeFunction(CallRuntime* expr); | |
| 567 | |
| 568 // Load a value from a named property. | |
| 569 // The receiver is left on the stack by the IC. | |
| 570 void EmitNamedPropertyLoad(Property* expr); | |
| 571 | |
| 572 // Load a value from super.named property. | |
| 573 // Expect receiver ('this' value) and home_object on the stack. | |
| 574 void EmitNamedSuperPropertyLoad(Property* expr); | |
| 575 | |
| 576 // Load a value from super[keyed] property. | |
| 577 // Expect receiver ('this' value), home_object and key on the stack. | |
| 578 void EmitKeyedSuperPropertyLoad(Property* expr); | |
| 579 | |
| 580 // Load a value from a keyed property. | |
| 581 // The receiver and the key is left on the stack by the IC. | |
| 582 void EmitKeyedPropertyLoad(Property* expr); | |
| 583 | |
| 584 // Adds the properties to the class (function) object and to its prototype. | |
| 585 // Expects the class (function) in the accumulator. The class (function) is | |
| 586 // in the accumulator after installing all the properties. | |
| 587 void EmitClassDefineProperties(ClassLiteral* lit, int* used_store_slots); | |
| 588 | |
| 589 // Pushes the property key as a Name on the stack. | |
| 590 void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id); | |
| 591 | |
| 592 // Apply the compound assignment operator. Expects the left operand on top | |
| 593 // of the stack and the right one in the accumulator. | |
| 594 void EmitBinaryOp(BinaryOperation* expr, Token::Value op); | |
| 595 | |
| 596 // Helper functions for generating inlined smi code for certain | |
| 597 // binary operations. | |
| 598 void EmitInlineSmiBinaryOp(BinaryOperation* expr, | |
| 599 Token::Value op, | |
| 600 Expression* left, | |
| 601 Expression* right); | |
| 602 | |
| 603 // Assign to the given expression as if via '='. The right-hand-side value | |
| 604 // is expected in the accumulator. slot is only used if FLAG_vector_stores | |
| 605 // is true. | |
| 606 void EmitAssignment(Expression* expr, FeedbackVectorICSlot slot); | |
| 607 | |
| 608 // Complete a variable assignment. The right-hand-side value is expected | |
| 609 // in the accumulator. | |
| 610 void EmitVariableAssignment(Variable* var, Token::Value op, | |
| 611 FeedbackVectorICSlot slot); | |
| 612 | |
| 613 // Helper functions to EmitVariableAssignment | |
| 614 void EmitStoreToStackLocalOrContextSlot(Variable* var, | |
| 615 MemOperand location); | |
| 616 | |
| 617 // Complete a named property assignment. The receiver is expected on top | |
| 618 // of the stack and the right-hand-side value in the accumulator. | |
| 619 void EmitNamedPropertyAssignment(Assignment* expr); | |
| 620 | |
| 621 // Complete a super named property assignment. The right-hand-side value | |
| 622 // is expected in accumulator. | |
| 623 void EmitNamedSuperPropertyStore(Property* prop); | |
| 624 | |
| 625 // Complete a super named property assignment. The right-hand-side value | |
| 626 // is expected in accumulator. | |
| 627 void EmitKeyedSuperPropertyStore(Property* prop); | |
| 628 | |
| 629 // Complete a keyed property assignment. The receiver and key are | |
| 630 // expected on top of the stack and the right-hand-side value in the | |
| 631 // accumulator. | |
| 632 void EmitKeyedPropertyAssignment(Assignment* expr); | |
| 633 | |
| 634 static bool NeedsHomeObject(Expression* expr) { | |
| 635 return FunctionLiteral::NeedsHomeObject(expr); | |
| 636 } | |
| 637 | |
| 638 // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral. | |
| 639 // The value of the initializer is expected to be at the top of the stack. | |
| 640 // |offset| is the offset in the stack where the home object can be found. | |
| 641 void EmitSetHomeObjectIfNeeded( | |
| 642 Expression* initializer, int offset, | |
| 643 FeedbackVectorICSlot slot = FeedbackVectorICSlot::Invalid()); | |
| 644 | |
| 645 void EmitLoadSuperConstructor(SuperCallReference* super_call_ref); | |
| 646 | |
| 647 void CallIC(Handle<Code> code, | |
| 648 TypeFeedbackId id = TypeFeedbackId::None()); | |
| 649 | |
| 650 // Inside typeof reference errors are never thrown. | |
| 651 void CallLoadIC(TypeofMode typeof_mode, LanguageMode language_mode = SLOPPY, | |
| 652 TypeFeedbackId id = TypeFeedbackId::None()); | |
| 653 void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None()); | |
| 654 | |
| 655 void SetFunctionPosition(FunctionLiteral* fun); | |
| 656 void SetReturnPosition(FunctionLiteral* fun); | |
| 657 | |
| 658 enum InsertBreak { INSERT_BREAK, SKIP_BREAK }; | |
| 659 | |
| 660 // During stepping we want to be able to break at each statement, but not at | |
| 661 // every (sub-)expression. That is why by default we insert breaks at every | |
| 662 // statement position, but not at every expression position, unless stated | |
| 663 // otherwise. | |
| 664 void SetStatementPosition(Statement* stmt, | |
| 665 InsertBreak insert_break = INSERT_BREAK); | |
| 666 void SetExpressionPosition(Expression* expr, | |
| 667 InsertBreak insert_break = SKIP_BREAK); | |
| 668 | |
| 669 // Consider an expression a statement. As such, we also insert a break. | |
| 670 // This is used in loop headers where we want to break for each iteration. | |
| 671 void SetExpressionAsStatementPosition(Expression* expr); | |
| 672 | |
| 673 void SetCallPosition(Expression* expr, int argc); | |
| 674 | |
| 675 void SetConstructCallPosition(Expression* expr); | |
| 676 | |
| 677 // Non-local control flow support. | |
| 678 void EnterTryBlock(int handler_index, Label* handler); | |
| 679 void ExitTryBlock(int handler_index); | |
| 680 void EnterFinallyBlock(); | |
| 681 void ExitFinallyBlock(); | |
| 682 void ClearPendingMessage(); | |
| 683 | |
| 684 // Loop nesting counter. | |
| 685 int loop_depth() { return loop_depth_; } | |
| 686 void increment_loop_depth() { loop_depth_++; } | |
| 687 void decrement_loop_depth() { | |
| 688 DCHECK(loop_depth_ > 0); | |
| 689 loop_depth_--; | |
| 690 } | |
| 691 | |
| 692 MacroAssembler* masm() const { return masm_; } | |
| 693 | |
| 694 class ExpressionContext; | |
| 695 const ExpressionContext* context() { return context_; } | |
| 696 void set_new_context(const ExpressionContext* context) { context_ = context; } | |
| 697 | |
| 698 Handle<Script> script() { return info_->script(); } | |
| 699 bool is_eval() { return info_->is_eval(); } | |
| 700 bool is_native() { return info_->is_native(); } | |
| 701 LanguageMode language_mode() { return function()->language_mode(); } | |
| 702 bool is_simple_parameter_list() { return info_->is_simple_parameter_list(); } | |
| 703 FunctionLiteral* function() { return info_->function(); } | |
| 704 Scope* scope() { return scope_; } | |
| 705 | |
| 706 static Register result_register(); | |
| 707 static Register context_register(); | |
| 708 | |
| 709 // Set fields in the stack frame. Offsets are the frame pointer relative | |
| 710 // offsets defined in, e.g., StandardFrameConstants. | |
| 711 void StoreToFrameField(int frame_offset, Register value); | |
| 712 | |
| 713 // Load a value from the current context. Indices are defined as an enum | |
| 714 // in v8::internal::Context. | |
| 715 void LoadContextField(Register dst, int context_index); | |
| 716 | |
| 717 // Push the function argument for the runtime functions PushWithContext | |
| 718 // and PushCatchContext. | |
| 719 void PushFunctionArgumentForContextAllocation(); | |
| 720 | |
| 721 void PushCalleeAndWithBaseObject(Call* expr); | |
| 722 | |
| 723 // AST node visit functions. | |
| 724 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) override; | |
| 725 AST_NODE_LIST(DECLARE_VISIT) | |
| 726 #undef DECLARE_VISIT | |
| 727 | |
| 728 void VisitComma(BinaryOperation* expr); | |
| 729 void VisitLogicalExpression(BinaryOperation* expr); | |
| 730 void VisitArithmeticExpression(BinaryOperation* expr); | |
| 731 | |
| 732 void VisitForTypeofValue(Expression* expr); | |
| 733 | |
| 734 void Generate(); | |
| 735 void PopulateDeoptimizationData(Handle<Code> code); | |
| 736 void PopulateTypeFeedbackInfo(Handle<Code> code); | |
| 737 void PopulateHandlerTable(Handle<Code> code); | |
| 738 | |
| 739 bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const; | |
| 740 bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const; | |
| 741 | |
| 742 void EmitLoadStoreICSlot(FeedbackVectorICSlot slot); | |
| 743 | |
| 744 int NewHandlerTableEntry(); | |
| 745 | |
| 746 struct BailoutEntry { | |
| 747 BailoutId id; | |
| 748 unsigned pc_and_state; | |
| 749 }; | |
| 750 | |
| 751 struct BackEdgeEntry { | |
| 752 BailoutId id; | |
| 753 unsigned pc; | |
| 754 uint32_t loop_depth; | |
| 755 }; | |
| 756 | |
| 757 struct HandlerTableEntry { | |
| 758 unsigned range_start; | |
| 759 unsigned range_end; | |
| 760 unsigned handler_offset; | |
| 761 int stack_depth; | |
| 762 int try_catch_depth; | |
| 763 }; | |
| 764 | |
  // Abstract base describing where an expression's value is expected to
  // end up: discarded (EffectContext), in the accumulator, on the stack,
  // or consumed as a branch condition (TestContext). Contexts form a
  // stack, linked through old_ and pushed/popped RAII-style by the
  // constructor and destructor.
  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      // Push this context onto the codegen's context stack.
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      // Pop back to the enclosing context.
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context. The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression. The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in accu/on stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it. Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    // Protected data member: subclasses emit code through this assembler.
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;  // Enclosing context (restored by dtor).
    FullCodeGenerator* codegen_;
  };
| 832 | |
  // Context that leaves the expression's value in the accumulator
  // (see ExpressionContext::IsAccumulatorValue).
  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsAccumulatorValue() const { return true; }
  };
| 853 | |
  // Context that leaves the expression's value on top of the stack
  // (see ExpressionContext::IsStackValue).
  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsStackValue() const { return true; }
  };
| 874 | |
| 875 class TestContext : public ExpressionContext { | |
| 876 public: | |
| 877 TestContext(FullCodeGenerator* codegen, | |
| 878 Expression* condition, | |
| 879 Label* true_label, | |
| 880 Label* false_label, | |
| 881 Label* fall_through) | |
| 882 : ExpressionContext(codegen), | |
| 883 condition_(condition), | |
| 884 true_label_(true_label), | |
| 885 false_label_(false_label), | |
| 886 fall_through_(fall_through) { } | |
| 887 | |
| 888 static const TestContext* cast(const ExpressionContext* context) { | |
| 889 DCHECK(context->IsTest()); | |
| 890 return reinterpret_cast<const TestContext*>(context); | |
| 891 } | |
| 892 | |
| 893 Expression* condition() const { return condition_; } | |
| 894 Label* true_label() const { return true_label_; } | |
| 895 Label* false_label() const { return false_label_; } | |
| 896 Label* fall_through() const { return fall_through_; } | |
| 897 | |
| 898 virtual void Plug(bool flag) const; | |
| 899 virtual void Plug(Register reg) const; | |
| 900 virtual void Plug(Label* materialize_true, Label* materialize_false) const; | |
| 901 virtual void Plug(Variable* var) const; | |
| 902 virtual void Plug(Handle<Object> lit) const; | |
| 903 virtual void Plug(Heap::RootListIndex) const; | |
| 904 virtual void PlugTOS() const; | |
| 905 virtual void DropAndPlug(int count, Register reg) const; | |
| 906 virtual void PrepareTest(Label* materialize_true, | |
| 907 Label* materialize_false, | |
| 908 Label** if_true, | |
| 909 Label** if_false, | |
| 910 Label** fall_through) const; | |
| 911 virtual bool IsTest() const { return true; } | |
| 912 | |
| 913 private: | |
| 914 Expression* condition_; | |
| 915 Label* true_label_; | |
| 916 Label* false_label_; | |
| 917 Label* fall_through_; | |
| 918 }; | |
| 919 | |
  // Context for an expression evaluated only for its side effects; the
  // value is discarded (see ExpressionContext::IsEffect).
  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsEffect() const { return true; }
  };
| 940 | |
  // RAII helper bracketing code generation for a block: the constructor
  // switches to |scope| (allocating a block context when
  // needs_block_context_ is set -- TODO confirm exact condition; the
  // definitions are out of line) and the destructor restores saved_scope_,
  // using exit_id_ as the bailout point after the block.
  class EnterBlockScopeIfNeeded {
   public:
    EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
                            BailoutId entry_id, BailoutId declarations_id,
                            BailoutId exit_id);
    ~EnterBlockScopeIfNeeded();

   private:
    MacroAssembler* masm() const { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    Scope* saved_scope_;    // Scope to restore on destruction.
    BailoutId exit_id_;     // Bailout id used when leaving the block.
    bool needs_block_context_;
  };
| 956 | |
  MacroAssembler* masm_;               // Assembler all code is emitted through.
  CompilationInfo* info_;              // Compilation job being processed.
  Scope* scope_;                       // Current scope (initially info->scope()).
  Label return_label_;
  NestedStatement* nesting_stack_;     // Innermost enclosing nested statement.
  int loop_depth_;
  int try_catch_depth_;
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  const ExpressionContext* context_;   // Top of the expression-context stack.
  ZoneList<BailoutEntry> bailout_entries_;    // Consumed by
                                              // PopulateDeoptimizationData().
  ZoneList<BackEdgeEntry> back_edges_;
  ZoneVector<HandlerTableEntry> handler_table_;  // Consumed by
                                                 // PopulateHandlerTable().
  int ic_total_count_;                 // Reported via PopulateTypeFeedbackInfo().
  Handle<Cell> profiling_counter_;
  bool generate_debug_code_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};
| 980 | |
| 981 | |
| 982 // A map from property names to getter/setter pairs allocated in the zone. | |
| 983 class AccessorTable: public TemplateHashMap<Literal, | |
| 984 ObjectLiteral::Accessors, | |
| 985 ZoneAllocationPolicy> { | |
| 986 public: | |
| 987 explicit AccessorTable(Zone* zone) : | |
| 988 TemplateHashMap<Literal, ObjectLiteral::Accessors, | |
| 989 ZoneAllocationPolicy>(Literal::Match, | |
| 990 ZoneAllocationPolicy(zone)), | |
| 991 zone_(zone) { } | |
| 992 | |
| 993 Iterator lookup(Literal* literal) { | |
| 994 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_)); | |
| 995 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors(); | |
| 996 return it; | |
| 997 } | |
| 998 | |
| 999 private: | |
| 1000 Zone* zone_; | |
| 1001 }; | |
| 1002 | |
| 1003 | |
// Read-side accessor for the back-edge table that full codegen emits into a
// function's code object: a uint32 entry count followed by |length| rows of
// {AST id, pc offset, loop depth}, each field kIntSize wide (see the
// k*Offset constants below). Also hosts the static helpers that patch back
// edges between interrupt checks and on-stack-replacement stubs.
class BackEdgeTable {
 public:
  // |required| is deliberately unused: forcing callers to hold a
  // DisallowHeapAllocation presumably guarantees the raw addresses cached
  // below cannot be invalidated by a moving GC -- TODO confirm.
  BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
    DCHECK(code->kind() == Code::FUNCTION);
    instruction_start_ = code->instruction_start();
    Address table_address = instruction_start_ + code->back_edge_table_offset();
    length_ = Memory::uint32_at(table_address);
    start_ = table_address + kTableLengthSize;  // Entries follow the count.
  }

  // Number of entries in the table.
  uint32_t length() { return length_; }

  // AST id of the loop that owns back edge |index|.
  BailoutId ast_id(uint32_t index) {
    return BailoutId(static_cast<int>(
        Memory::uint32_at(entry_at(index) + kAstIdOffset)));
  }

  // Loop nesting depth recorded for back edge |index|.
  uint32_t loop_depth(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
  }

  // Offset of the back edge within the instruction stream.
  uint32_t pc_offset(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
  }

  // Absolute address of the back edge.
  Address pc(uint32_t index) {
    return instruction_start_ + pc_offset(index);
  }

  // Current patch state of a back edge site.
  enum BackEdgeState {
    INTERRUPT,
    ON_STACK_REPLACEMENT,
    OSR_AFTER_STACK_CHECK
  };

  // Increase allowed loop nesting level by one and patch those matching loops.
  static void Patch(Isolate* isolate, Code* unoptimized_code);

  // Patch the back edge to the target state, provided the correct callee.
  static void PatchAt(Code* unoptimized_code,
                      Address pc,
                      BackEdgeState target_state,
                      Code* replacement_code);

  // Change all patched back edges back to normal interrupts.
  static void Revert(Isolate* isolate,
                     Code* unoptimized_code);

  // Change a back edge patched for on-stack replacement to perform a
  // stack check first.
  static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);

  // Revert the patch by AddStackCheck.
  static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);

  // Return the current patch state of the back edge.
  static BackEdgeState GetBackEdgeState(Isolate* isolate,
                                        Code* unoptimized_code,
                                        Address pc_after);

#ifdef DEBUG
  // Verify that all back edges of a certain loop depth are patched.
  static bool Verify(Isolate* isolate, Code* unoptimized_code);
#endif  // DEBUG

 private:
  // Address of entry |index|; bounds-checked in debug builds only.
  Address entry_at(uint32_t index) {
    DCHECK(index < length_);
    return start_ + index * kEntrySize;
  }

  // On-disk/in-code layout of the table (all fields kIntSize wide).
  static const int kTableLengthSize = kIntSize;
  static const int kAstIdOffset = 0 * kIntSize;
  static const int kPcOffsetOffset = 1 * kIntSize;
  static const int kLoopDepthOffset = 2 * kIntSize;
  static const int kEntrySize = 3 * kIntSize;

  Address start_;              // First entry (just past the length word).
  Address instruction_start_;  // Base that pc offsets are relative to.
  uint32_t length_;            // Entry count read from the table header.
};
| 1085 | |
| 1086 | |
| 1087 } } // namespace v8::internal | |
| 1088 | |
| 1089 #endif // V8_FULL_CODEGEN_H_ | |
| OLD | NEW |