| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "src/v8.h" | |
| 6 | |
| 7 #include "src/ast.h" | |
| 8 #include "src/ast-numbering.h" | |
| 9 #include "src/code-factory.h" | |
| 10 #include "src/codegen.h" | |
| 11 #include "src/compiler.h" | |
| 12 #include "src/debug.h" | |
| 13 #include "src/full-codegen.h" | |
| 14 #include "src/liveedit.h" | |
| 15 #include "src/macro-assembler.h" | |
| 16 #include "src/prettyprinter.h" | |
| 17 #include "src/scopeinfo.h" | |
| 18 #include "src/scopes.h" | |
| 19 #include "src/snapshot/snapshot.h" | |
| 20 | |
| 21 namespace v8 { | |
| 22 namespace internal { | |
| 23 | |
| 24 #define __ ACCESS_MASM(masm()) | |
| 25 | |
// Compiles |info|'s function with the full (non-optimizing) code generator.
// Returns true on success.  Returns false — without leaving a pending
// exception — when code generation bails out on stack overflow.
bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());

  // Ensure that the feedback vector is large enough.
  info->EnsureFeedbackVector();

  // Bump the source-size counter, unless script or source is undefined.
  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
  // Keep relocation info when the generated code will go into a snapshot.
  if (info->will_serialize()) masm.enable_serializer();

  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    DCHECK(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  // Attach the metadata the runtime needs to deoptimize, profile, and
  // handle exceptions in this code object.
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateHandlerTable(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_has_reloc_info_for_serialization(info->will_serialize());
  code->set_compiled_optimizable(info->IsOptimizable());
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));

#ifdef DEBUG
  // Check that no context-specific object has been embedded.
  code->VerifyEmbeddedObjects(Code::kNoContextSpecificPointers);
#endif  // DEBUG
  return true;
}
| 77 | |
| 78 | |
| 79 unsigned FullCodeGenerator::EmitBackEdgeTable() { | |
| 80 // The back edge table consists of a length (in number of entries) | |
| 81 // field, and then a sequence of entries. Each entry is a pair of AST id | |
| 82 // and code-relative pc offset. | |
| 83 masm()->Align(kPointerSize); | |
| 84 unsigned offset = masm()->pc_offset(); | |
| 85 unsigned length = back_edges_.length(); | |
| 86 __ dd(length); | |
| 87 for (unsigned i = 0; i < length; ++i) { | |
| 88 __ dd(back_edges_[i].id.ToInt()); | |
| 89 __ dd(back_edges_[i].pc); | |
| 90 __ dd(back_edges_[i].loop_depth); | |
| 91 } | |
| 92 return offset; | |
| 93 } | |
| 94 | |
| 95 | |
| 96 void FullCodeGenerator::EnsureSlotContainsAllocationSite( | |
| 97 FeedbackVectorSlot slot) { | |
| 98 Handle<TypeFeedbackVector> vector = FeedbackVector(); | |
| 99 if (!vector->Get(slot)->IsAllocationSite()) { | |
| 100 Handle<AllocationSite> allocation_site = | |
| 101 isolate()->factory()->NewAllocationSite(); | |
| 102 vector->Set(slot, *allocation_site); | |
| 103 } | |
| 104 } | |
| 105 | |
| 106 | |
| 107 void FullCodeGenerator::EnsureSlotContainsAllocationSite( | |
| 108 FeedbackVectorICSlot slot) { | |
| 109 Handle<TypeFeedbackVector> vector = FeedbackVector(); | |
| 110 if (!vector->Get(slot)->IsAllocationSite()) { | |
| 111 Handle<AllocationSite> allocation_site = | |
| 112 isolate()->factory()->NewAllocationSite(); | |
| 113 vector->Set(slot, *allocation_site); | |
| 114 } | |
| 115 } | |
| 116 | |
| 117 | |
| 118 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) { | |
| 119 // Fill in the deoptimization information. | |
| 120 DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty()); | |
| 121 if (!info_->HasDeoptimizationSupport()) return; | |
| 122 int length = bailout_entries_.length(); | |
| 123 Handle<DeoptimizationOutputData> data = | |
| 124 DeoptimizationOutputData::New(isolate(), length, TENURED); | |
| 125 for (int i = 0; i < length; i++) { | |
| 126 data->SetAstId(i, bailout_entries_[i].id); | |
| 127 data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state)); | |
| 128 } | |
| 129 code->set_deoptimization_data(*data); | |
| 130 } | |
| 131 | |
| 132 | |
| 133 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) { | |
| 134 Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo(); | |
| 135 info->set_ic_total_count(ic_total_count_); | |
| 136 DCHECK(!isolate()->heap()->InNewSpace(*info)); | |
| 137 code->set_type_feedback_info(*info); | |
| 138 } | |
| 139 | |
| 140 | |
| 141 void FullCodeGenerator::PopulateHandlerTable(Handle<Code> code) { | |
| 142 int handler_table_size = static_cast<int>(handler_table_.size()); | |
| 143 Handle<HandlerTable> table = | |
| 144 Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray( | |
| 145 HandlerTable::LengthForRange(handler_table_size), TENURED)); | |
| 146 for (int i = 0; i < handler_table_size; ++i) { | |
| 147 HandlerTable::CatchPrediction prediction = | |
| 148 handler_table_[i].try_catch_depth > 0 ? HandlerTable::CAUGHT | |
| 149 : HandlerTable::UNCAUGHT; | |
| 150 table->SetRangeStart(i, handler_table_[i].range_start); | |
| 151 table->SetRangeEnd(i, handler_table_[i].range_end); | |
| 152 table->SetRangeHandler(i, handler_table_[i].handler_offset, prediction); | |
| 153 table->SetRangeDepth(i, handler_table_[i].stack_depth); | |
| 154 } | |
| 155 code->set_handler_table(*table); | |
| 156 } | |
| 157 | |
| 158 | |
| 159 int FullCodeGenerator::NewHandlerTableEntry() { | |
| 160 int index = static_cast<int>(handler_table_.size()); | |
| 161 HandlerTableEntry entry = {0, 0, 0, 0, 0}; | |
| 162 handler_table_.push_back(entry); | |
| 163 return index; | |
| 164 } | |
| 165 | |
| 166 | |
| 167 bool FullCodeGenerator::MustCreateObjectLiteralWithRuntime( | |
| 168 ObjectLiteral* expr) const { | |
| 169 int literal_flags = expr->ComputeFlags(); | |
| 170 // FastCloneShallowObjectStub doesn't copy elements, and object literals don't | |
| 171 // support copy-on-write (COW) elements for now. | |
| 172 // TODO(mvstanton): make object literals support COW elements. | |
| 173 return masm()->serializer_enabled() || | |
| 174 literal_flags != ObjectLiteral::kShallowProperties || | |
| 175 literal_flags != ObjectLiteral::kFastElements || | |
| 176 expr->properties_count() > | |
| 177 FastCloneShallowObjectStub::kMaximumClonedProperties; | |
| 178 } | |
| 179 | |
| 180 | |
| 181 bool FullCodeGenerator::MustCreateArrayLiteralWithRuntime( | |
| 182 ArrayLiteral* expr) const { | |
| 183 // TODO(rossberg): Teach strong mode to FastCloneShallowArrayStub. | |
| 184 return expr->depth() > 1 || expr->is_strong() || | |
| 185 expr->values()->length() > JSObject::kInitialMaxFastElementArray; | |
| 186 } | |
| 187 | |
| 188 | |
| 189 void FullCodeGenerator::Initialize() { | |
| 190 InitializeAstVisitor(info_->isolate(), info_->zone()); | |
| 191 // The generation of debug code must match between the snapshot code and the | |
| 192 // code that is generated later. This is assumed by the debugger when it is | |
| 193 // calculating PC offsets after generating a debug version of code. Therefore | |
| 194 // we disable the production of debug code in the full compiler if we are | |
| 195 // either generating a snapshot or we booted from a snapshot. | |
| 196 generate_debug_code_ = FLAG_debug_code && !masm_->serializer_enabled() && | |
| 197 !info_->isolate()->snapshot_available(); | |
| 198 masm_->set_emit_debug_code(generate_debug_code_); | |
| 199 masm_->set_predictable_code_size(true); | |
| 200 } | |
| 201 | |
| 202 | |
| 203 void FullCodeGenerator::PrepareForBailout(Expression* node, State state) { | |
| 204 PrepareForBailoutForId(node->id(), state); | |
| 205 } | |
| 206 | |
| 207 | |
| 208 void FullCodeGenerator::CallLoadIC(TypeofMode typeof_mode, | |
| 209 LanguageMode language_mode, | |
| 210 TypeFeedbackId id) { | |
| 211 Handle<Code> ic = | |
| 212 CodeFactory::LoadIC(isolate(), typeof_mode, language_mode).code(); | |
| 213 CallIC(ic, id); | |
| 214 } | |
| 215 | |
| 216 | |
| 217 void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) { | |
| 218 Handle<Code> ic = CodeFactory::StoreIC(isolate(), language_mode()).code(); | |
| 219 CallIC(ic, id); | |
| 220 } | |
| 221 | |
| 222 | |
// Records a bailout point at the return site of |call| so the
// deoptimizer can rebuild the frame of an inlined function.
void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}
| 238 | |
| 239 | |
| 240 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) { | |
| 241 // There's no need to prepare this code for bailouts from already optimized | |
| 242 // code or code that can't be optimized. | |
| 243 if (!info_->HasDeoptimizationSupport()) return; | |
| 244 unsigned pc_and_state = | |
| 245 StateField::encode(state) | PcField::encode(masm_->pc_offset()); | |
| 246 DCHECK(Smi::IsValid(pc_and_state)); | |
| 247 #ifdef DEBUG | |
| 248 for (int i = 0; i < bailout_entries_.length(); ++i) { | |
| 249 DCHECK(bailout_entries_[i].id != id); | |
| 250 } | |
| 251 #endif | |
| 252 BailoutEntry entry = { id, pc_and_state }; | |
| 253 bailout_entries_.Add(entry, zone()); | |
| 254 } | |
| 255 | |
| 256 | |
| 257 void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) { | |
| 258 // The pc offset does not need to be encoded and packed together with a state. | |
| 259 DCHECK(masm_->pc_offset() > 0); | |
| 260 DCHECK(loop_depth() > 0); | |
| 261 uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker); | |
| 262 BackEdgeEntry entry = | |
| 263 { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth }; | |
| 264 back_edges_.Add(entry, zone()); | |
| 265 } | |
| 266 | |
| 267 | |
| 268 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) { | |
| 269 // Inline smi case inside loops, but not division and modulo which | |
| 270 // are too complicated and take up too much space. | |
| 271 if (op == Token::DIV ||op == Token::MOD) return false; | |
| 272 if (FLAG_always_inline_smi_code) return true; | |
| 273 return loop_depth_ > 0; | |
| 274 } | |
| 275 | |
| 276 | |
| 277 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { | |
| 278 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 279 } | |
| 280 | |
| 281 | |
| 282 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { | |
| 283 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 284 codegen()->GetVar(result_register(), var); | |
| 285 } | |
| 286 | |
| 287 | |
| 288 void FullCodeGenerator::TestContext::Plug(Variable* var) const { | |
| 289 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 290 // For simplicity we always test the accumulator register. | |
| 291 codegen()->GetVar(result_register(), var); | |
| 292 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); | |
| 293 codegen()->DoTest(this); | |
| 294 } | |
| 295 | |
| 296 | |
// In an effect context a register value is simply discarded; no code.
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}
| 299 | |
| 300 | |
// Moves the register's value into the accumulator (result) register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}
| 304 | |
| 305 | |
// Pushes the register's value onto the operand stack.
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}
| 309 | |
| 310 | |
// Branches on the truth value of the register's contents.
void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  // Record a bailout point before control flow is split.
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
| 317 | |
| 318 | |
// A constant boolean has no effect; nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
| 320 | |
| 321 | |
// Discards the value on top of the stack; an effect context ignores it.
void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}
| 325 | |
| 326 | |
// Pops the top of stack into the accumulator (result) register.
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}
| 330 | |
| 331 | |
// The value already sits on top of the stack, exactly where a stack-value
// context wants it; nothing to emit.
void FullCodeGenerator::StackValueContext::PlugTOS() const {
}
| 334 | |
| 335 | |
// Pops the top of stack and branches on its truth value.
void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  // Record a bailout point before control flow is split.
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
| 342 | |
| 343 | |
| 344 void FullCodeGenerator::EffectContext::PrepareTest( | |
| 345 Label* materialize_true, | |
| 346 Label* materialize_false, | |
| 347 Label** if_true, | |
| 348 Label** if_false, | |
| 349 Label** fall_through) const { | |
| 350 // In an effect context, the true and the false case branch to the | |
| 351 // same label. | |
| 352 *if_true = *if_false = *fall_through = materialize_true; | |
| 353 } | |
| 354 | |
| 355 | |
| 356 void FullCodeGenerator::AccumulatorValueContext::PrepareTest( | |
| 357 Label* materialize_true, | |
| 358 Label* materialize_false, | |
| 359 Label** if_true, | |
| 360 Label** if_false, | |
| 361 Label** fall_through) const { | |
| 362 *if_true = *fall_through = materialize_true; | |
| 363 *if_false = materialize_false; | |
| 364 } | |
| 365 | |
| 366 | |
| 367 void FullCodeGenerator::StackValueContext::PrepareTest( | |
| 368 Label* materialize_true, | |
| 369 Label* materialize_false, | |
| 370 Label** if_true, | |
| 371 Label** if_false, | |
| 372 Label** fall_through) const { | |
| 373 *if_true = *fall_through = materialize_true; | |
| 374 *if_false = materialize_false; | |
| 375 } | |
| 376 | |
| 377 | |
| 378 void FullCodeGenerator::TestContext::PrepareTest( | |
| 379 Label* materialize_true, | |
| 380 Label* materialize_false, | |
| 381 Label** if_true, | |
| 382 Label** if_false, | |
| 383 Label** fall_through) const { | |
| 384 *if_true = true_label_; | |
| 385 *if_false = false_label_; | |
| 386 *fall_through = fall_through_; | |
| 387 } | |
| 388 | |
| 389 | |
| 390 void FullCodeGenerator::DoTest(const TestContext* context) { | |
| 391 DoTest(context->condition(), | |
| 392 context->true_label(), | |
| 393 context->false_label(), | |
| 394 context->fall_through()); | |
| 395 } | |
| 396 | |
| 397 | |
| 398 void FullCodeGenerator::VisitDeclarations( | |
| 399 ZoneList<Declaration*>* declarations) { | |
| 400 ZoneList<Handle<Object> >* saved_globals = globals_; | |
| 401 ZoneList<Handle<Object> > inner_globals(10, zone()); | |
| 402 globals_ = &inner_globals; | |
| 403 | |
| 404 AstVisitor::VisitDeclarations(declarations); | |
| 405 | |
| 406 if (!globals_->is_empty()) { | |
| 407 // Invoke the platform-dependent code generator to do the actual | |
| 408 // declaration of the global functions and variables. | |
| 409 Handle<FixedArray> array = | |
| 410 isolate()->factory()->NewFixedArray(globals_->length(), TENURED); | |
| 411 for (int i = 0; i < globals_->length(); ++i) | |
| 412 array->set(i, *globals_->at(i)); | |
| 413 DeclareGlobals(array); | |
| 414 } | |
| 415 | |
| 416 globals_ = saved_globals; | |
| 417 } | |
| 418 | |
| 419 | |
// Emits declaration code for an import binding.  Most cases are still
// unimplemented (see TODOs); only the context-allocated case performs a
// debug-mode sanity check today.
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // TODO(rossberg)
      break;

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::LOOKUP:
      // Import bindings are never allocated to these slot kinds.
      UNREACHABLE();
  }
}
| 442 | |
| 443 | |
// Export declarations do not require any code yet.
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}
| 447 | |
| 448 | |
// A variable reference: delegate to the platform-specific variable load.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
| 453 | |
| 454 | |
| 455 int FullCodeGenerator::DeclareGlobalsFlags() { | |
| 456 DCHECK(DeclareGlobalsLanguageMode::is_valid(language_mode())); | |
| 457 return DeclareGlobalsEvalFlag::encode(is_eval()) | | |
| 458 DeclareGlobalsNativeFlag::encode(is_native()) | | |
| 459 DeclareGlobalsLanguageMode::encode(language_mode()); | |
| 460 } | |
| 461 | |
| 462 | |
| 463 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { | |
| 464 // Load the arguments on the stack and call the stub. | |
| 465 SubStringStub stub(isolate()); | |
| 466 ZoneList<Expression*>* args = expr->arguments(); | |
| 467 DCHECK(args->length() == 3); | |
| 468 VisitForStackValue(args->at(0)); | |
| 469 VisitForStackValue(args->at(1)); | |
| 470 VisitForStackValue(args->at(2)); | |
| 471 __ CallStub(&stub); | |
| 472 context()->Plug(result_register()); | |
| 473 } | |
| 474 | |
| 475 | |
| 476 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { | |
| 477 // Load the arguments on the stack and call the stub. | |
| 478 RegExpExecStub stub(isolate()); | |
| 479 ZoneList<Expression*>* args = expr->arguments(); | |
| 480 DCHECK(args->length() == 4); | |
| 481 VisitForStackValue(args->at(0)); | |
| 482 VisitForStackValue(args->at(1)); | |
| 483 VisitForStackValue(args->at(2)); | |
| 484 VisitForStackValue(args->at(3)); | |
| 485 __ CallStub(&stub); | |
| 486 context()->Plug(result_register()); | |
| 487 } | |
| 488 | |
| 489 | |
| 490 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { | |
| 491 // Load the arguments on the stack and call the runtime function. | |
| 492 ZoneList<Expression*>* args = expr->arguments(); | |
| 493 DCHECK(args->length() == 2); | |
| 494 VisitForStackValue(args->at(0)); | |
| 495 VisitForStackValue(args->at(1)); | |
| 496 | |
| 497 MathPowStub stub(isolate(), MathPowStub::ON_STACK); | |
| 498 __ CallStub(&stub); | |
| 499 context()->Plug(result_register()); | |
| 500 } | |
| 501 | |
| 502 | |
| 503 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { | |
| 504 ZoneList<Expression*>* args = expr->arguments(); | |
| 505 DCHECK_EQ(2, args->length()); | |
| 506 | |
| 507 VisitForStackValue(args->at(0)); | |
| 508 VisitForStackValue(args->at(1)); | |
| 509 | |
| 510 StringCompareStub stub(isolate()); | |
| 511 __ CallStub(&stub); | |
| 512 context()->Plug(result_register()); | |
| 513 } | |
| 514 | |
| 515 | |
| 516 bool RecordStatementPosition(MacroAssembler* masm, int pos) { | |
| 517 if (pos == RelocInfo::kNoPosition) return false; | |
| 518 masm->positions_recorder()->RecordStatementPosition(pos); | |
| 519 masm->positions_recorder()->RecordPosition(pos); | |
| 520 return masm->positions_recorder()->WriteRecordedPositions(); | |
| 521 } | |
| 522 | |
| 523 | |
| 524 bool RecordPosition(MacroAssembler* masm, int pos) { | |
| 525 if (pos == RelocInfo::kNoPosition) return false; | |
| 526 masm->positions_recorder()->RecordPosition(pos); | |
| 527 return masm->positions_recorder()->WriteRecordedPositions(); | |
| 528 } | |
| 529 | |
| 530 | |
// Records the function's start position so the emitted prologue can be
// mapped back to source.
void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  RecordPosition(masm_, fun->start_position());
}
| 534 | |
| 535 | |
// Records the implicit return's statement position (one before the
// function's end position) and, in debug mode, emits a break slot there.
void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  RecordStatementPosition(masm_, fun->end_position() - 1);
  if (info_->is_debug()) {
    // Always emit a debug break slot before a return.
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
  }
}
| 543 | |
| 544 | |
| 545 void FullCodeGenerator::SetStatementPosition( | |
| 546 Statement* stmt, FullCodeGenerator::InsertBreak insert_break) { | |
| 547 if (stmt->position() == RelocInfo::kNoPosition) return; | |
| 548 bool recorded = RecordStatementPosition(masm_, stmt->position()); | |
| 549 if (recorded && insert_break == INSERT_BREAK && info_->is_debug() && | |
| 550 !stmt->IsDebuggerStatement()) { | |
| 551 DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION); | |
| 552 } | |
| 553 } | |
| 554 | |
| 555 | |
| 556 void FullCodeGenerator::SetExpressionPosition( | |
| 557 Expression* expr, FullCodeGenerator::InsertBreak insert_break) { | |
| 558 if (expr->position() == RelocInfo::kNoPosition) return; | |
| 559 bool recorded = RecordPosition(masm_, expr->position()); | |
| 560 if (recorded && insert_break == INSERT_BREAK && info_->is_debug()) { | |
| 561 DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION); | |
| 562 } | |
| 563 } | |
| 564 | |
| 565 | |
| 566 void FullCodeGenerator::SetExpressionAsStatementPosition(Expression* expr) { | |
| 567 if (expr->position() == RelocInfo::kNoPosition) return; | |
| 568 bool recorded = RecordStatementPosition(masm_, expr->position()); | |
| 569 if (recorded && info_->is_debug()) { | |
| 570 DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION); | |
| 571 } | |
| 572 } | |
| 573 | |
| 574 | |
| 575 void FullCodeGenerator::SetCallPosition(Expression* expr, int argc) { | |
| 576 if (expr->position() == RelocInfo::kNoPosition) return; | |
| 577 RecordPosition(masm_, expr->position()); | |
| 578 if (info_->is_debug()) { | |
| 579 // Always emit a debug break slot before a call. | |
| 580 DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_CALL, | |
| 581 argc); | |
| 582 } | |
| 583 } | |
| 584 | |
| 585 | |
| 586 void FullCodeGenerator::SetConstructCallPosition(Expression* expr) { | |
| 587 if (expr->position() == RelocInfo::kNoPosition) return; | |
| 588 RecordPosition(masm_, expr->position()); | |
| 589 if (info_->is_debug()) { | |
| 590 // Always emit a debug break slot before a construct call. | |
| 591 DebugCodegen::GenerateSlot(masm_, | |
| 592 RelocInfo::DEBUG_BREAK_SLOT_AT_CONSTRUCT_CALL); | |
| 593 } | |
| 594 } | |
| 595 | |
| 596 | |
// Super property access is not supported here; emit a runtime throw.
void FullCodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError, 0);
}
| 601 | |
| 602 | |
// Super calls are not supported here; emit a runtime throw.
void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError, 0);
}
| 606 | |
| 607 | |
// %_GeneratorNext(generator, value): resumes the generator in NEXT mode.
void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}
| 613 | |
| 614 | |
// %_GeneratorThrow(generator, value): resumes the generator in THROW mode.
void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}
| 620 | |
| 621 | |
// In full-codegen this intrinsic emits no break; it just produces smi 0
// as the expression's value.
void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}
| 625 | |
| 626 | |
| 627 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { | |
| 628 switch (expr->op()) { | |
| 629 case Token::COMMA: | |
| 630 return VisitComma(expr); | |
| 631 case Token::OR: | |
| 632 case Token::AND: | |
| 633 return VisitLogicalExpression(expr); | |
| 634 default: | |
| 635 return VisitArithmeticExpression(expr); | |
| 636 } | |
| 637 } | |
| 638 | |
| 639 | |
| 640 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) { | |
| 641 if (context()->IsEffect()) { | |
| 642 VisitForEffect(expr); | |
| 643 } else if (context()->IsAccumulatorValue()) { | |
| 644 VisitForAccumulatorValue(expr); | |
| 645 } else if (context()->IsStackValue()) { | |
| 646 VisitForStackValue(expr); | |
| 647 } else if (context()->IsTest()) { | |
| 648 const TestContext* test = TestContext::cast(context()); | |
| 649 VisitForControl(expr, test->true_label(), test->false_label(), | |
| 650 test->fall_through()); | |
| 651 } | |
| 652 } | |
| 653 | |
| 654 | |
// A comma expression: the left side is evaluated purely for effect, the
// right side is evaluated in the surrounding context.
void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}
| 660 | |
| 661 | |
// Emits code for a short-circuit && or || expression.  The shape of the
// emitted code depends on the surrounding context: in a test context the
// left operand branches directly to the context's labels; in value
// contexts the left value is kept on the stack while it is tested, and
// either restored (short circuit taken) or discarded (right side runs).
void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" :  "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    // For &&, a false left operand jumps straight to the false label;
    // for ||, a true left operand jumps straight to the true label.
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    // Short circuit taken: the left value is the result; reload it.
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    // Right side must run: the saved left value is no longer needed.
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    // If the short circuit is taken the pushed left value already sits
    // where a stack-value context wants it, so jump straight to done.
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  // The right operand is evaluated in the same kind of context as the
  // whole expression.
  VisitInDuplicateContext(right);
  __ bind(&done);
}
| 729 | |
| 730 | |
// Emits code for a non-short-circuit binary operation: the left operand
// ends up on the stack, the right in the accumulator, then either the
// inlined smi fast path or the generic binary-op code is emitted.
void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetExpressionPosition(expr);
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, left, right);
  } else {
    EmitBinaryOp(expr, op);
  }
}
| 747 | |
| 748 | |
// Evaluates |expr| as the operand of typeof.  Unallocated/global and
// lookup-slot variables get a special INSIDE_TYPEOF load so an undefined
// variable does not throw a reference error.
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && (proxy->var()->IsUnallocatedOrGlobalSlot() ||
                        proxy->var()->IsLookupSlot())) {
    EmitVariableLoad(proxy, INSIDE_TYPEOF);
    PrepareForBailout(proxy, TOS_REG);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
| 763 | |
| 764 | |
// Emits a statement block, entering a block scope if the block declares
// one and binding the break label at the end for `break` targets.
void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  {
    EnterBlockScopeIfNeeded block_scope_state(
        this, stmt->scope(), stmt->EntryId(), stmt->DeclsId(), stmt->ExitId());
    VisitStatements(stmt->statements());
    __ bind(nested_block.break_label());
  }
}
| 777 | |
| 778 | |
// An expression statement evaluates its expression purely for effect.
void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}
| 784 | |
| 785 | |
// An empty statement only records its source position; no code.
void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}
| 790 | |
| 791 | |
// Emits an if statement.  Bailout points are recorded for the then and
// else entries (and for the join) so the deoptimizer can resume in the
// correct arm.
void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    // Without an else, a false condition falls through to the join.
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}
| 818 | |
| 819 | |
// Emits a continue: unwinds nested statements up to the targeted loop,
// dropping their stack slots and restoring the loop's context, then
// jumps to the loop's continue label.
void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    // Walk back up the context chain to the loop's context and store it
    // into the frame's context slot.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}
| 846 | |
| 847 | |
// Compiles 'break'.  Mirrors VisitContinueStatement: unwinds the nesting
// stack out to the break target (loop, switch, or labelled block), restores
// the context, and jumps to the target's break label.
void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC. If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  // Accumulate stack depth and context nesting from every statement between
  // here and the construct being broken out of.
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    // Walk back up the context chain to the target's context and write it
    // into the frame's context slot.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}
| 874 | |
| 875 | |
| 876 void FullCodeGenerator::EmitUnwindBeforeReturn() { | |
| 877 NestedStatement* current = nesting_stack_; | |
| 878 int stack_depth = 0; | |
| 879 int context_length = 0; | |
| 880 while (current != NULL) { | |
| 881 current = current->Exit(&stack_depth, &context_length); | |
| 882 } | |
| 883 __ Drop(stack_depth); | |
| 884 } | |
| 885 | |
| 886 | |
// Evaluates a (computed) property key, converts it via the TO_NAME builtin,
// and leaves the converted key pushed on the operand stack.  |bailout_id|
// marks the deopt point after the conversion.
void FullCodeGenerator::EmitPropertyKey(ObjectLiteralProperty* property,
                                        BailoutId bailout_id) {
  VisitForStackValue(property->key());
  __ InvokeBuiltin(Builtins::TO_NAME, CALL_FUNCTION);
  PrepareForBailoutForId(bailout_id, NO_REGISTERS);
  __ Push(result_register());
}
| 894 | |
| 895 | |
// Loads the super constructor by pushing the current function (from the
// super-call reference's this-function variable) and calling
// Runtime::kGetPrototype on it.
void FullCodeGenerator::EmitLoadSuperConstructor(SuperCallReference* ref) {
  VisitForStackValue(ref->this_function_var());
  __ CallRuntime(Runtime::kGetPrototype, 1);
}
| 900 | |
| 901 | |
| 902 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) { | |
| 903 Comment cmnt(masm_, "[ ReturnStatement"); | |
| 904 SetStatementPosition(stmt); | |
| 905 Expression* expr = stmt->expression(); | |
| 906 VisitForAccumulatorValue(expr); | |
| 907 EmitUnwindBeforeReturn(); | |
| 908 EmitReturnSequence(); | |
| 909 } | |
| 910 | |
| 911 | |
// Compiles a with statement: allocates and pushes a with-context built from
// the statement's expression, visits the body under the with scope, then
// pops the context again.
void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  // Allocate the with context and install it as the current context.
  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  // Compile the body with scope_ temporarily switched to the with scope.
  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
| 934 | |
| 935 | |
// Compiles a do-while loop.  The body is emitted first, then the condition;
// a true condition jumps back to the body through the back-edge bookkeeping
// (which patches in interrupt/OSR checks).
void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  // Do not insert break location as we do that below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);

  // Here is the actual 'while' keyword.
  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
| 971 | |
| 972 | |
// Compiles a while loop.  The condition is tested at the top of each
// iteration; back-edge bookkeeping is emitted at the bottom before jumping
// back to the test.
void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);

  SetExpressionAsStatementPosition(stmt->cond());
  // A false condition exits through the break label.
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // 'continue' inside the body lands here, just before the back edge.
  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
| 1002 | |
| 1003 | |
// Compiles a for statement.  The init clause runs once, then control jumps
// to the test at the bottom of the loop; the body and next clause are laid
// out above the test so each iteration falls through body -> next ->
// back-edge bookkeeping -> test.
void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  // Do not insert break location as we do it below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label test, body;

  Iteration loop_statement(this, stmt);

  // The init clause (if any) is emitted once, outside the loop.
  if (stmt->init() != NULL) {
    SetStatementPosition(stmt->init());
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // 'continue' lands here, before the next clause.
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    SetExpressionAsStatementPosition(stmt->cond());
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    // No condition: loop unconditionally back to the body.
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
| 1051 | |
| 1052 | |
// Compiles a for-of loop.  The parser/AST supplies the desugared pieces
// (assign_iterator, next_result, result_done, assign_each); this function
// just stitches them together with the loop control flow.
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  SetExpressionAsStatementPosition(stmt->next_result());
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(), loop_statement.break_label(),
                  &result_not_done, &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
| 1091 | |
| 1092 | |
// Compiles a try-catch statement.  The catch (handler) code is emitted
// before the try code; the try block registers a handler-table entry whose
// handler label points back at it.
void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally. If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  PrepareForBailoutForId(stmt->HandlerId(), NO_REGISTERS);
  ClearPendingMessage();

  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  // Compile the catch block with scope_ temporarily switched to its scope.
  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  DCHECK(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);

  try_catch_depth_++;
  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry);
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  try_catch_depth_--;
  __ bind(&exit);
}
| 1145 | |
| 1146 | |
// Compiles a try-finally statement.  The finally block is emitted as a
// subroutine entered via __ Call from each of the three exit paths
// described below.
void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  PrepareForBailoutForId(stmt->HandlerId(), NO_REGISTERS);

  // Exception handler code. This code is only executed when an exception
  // is thrown. The exception is in the result register, and must be
  // preserved by the finally block. Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry);
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  // Execute the finally block on the way out. Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}
| 1210 | |
| 1211 | |
// Compiles a 'debugger;' statement by emitting a debug break, followed by a
// bailout point so execution can resume in optimized-free code afterwards.
void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.

  PrepareForBailoutForId(stmt->DebugBreakId(), NO_REGISTERS);
}
| 1221 | |
| 1222 | |
// Case clauses are never visited directly; they are compiled as part of
// their enclosing switch statement.
void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
| 1226 | |
| 1227 | |
// Compiles a conditional (ternary) expression.  Both arms are visited in a
// duplicate of the current expression context; in a test context the arms
// branch directly to the outer labels instead of joining at 'done'.
void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    // In a test context the then-expression branches straight to the
    // enclosing test's labels; there is no join.
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}
| 1256 | |
| 1257 | |
// Compiles a literal by plugging its constant value into the current
// expression context.
void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}
| 1262 | |
| 1263 | |
// Compiles a function literal: obtains (or compiles) its SharedFunctionInfo
// and emits code to instantiate a closure from it.  A failed compile is
// reported as stack overflow on this generator.
void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::GetSharedFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}
| 1276 | |
| 1277 | |
// Compiles a class literal.  Pushes name, extends-expression (or hole),
// constructor, script, and source positions, calls the DefineClass runtime
// function, then defines the class's properties and (if the class has its
// own scope) binds the class name variable.
void FullCodeGenerator::VisitClassLiteral(ClassLiteral* lit) {
  Comment cmnt(masm_, "[ ClassLiteral");

  {
    EnterBlockScopeIfNeeded block_scope_state(
        this, lit->scope(), lit->EntryId(), lit->DeclsId(), lit->ExitId());

    // Class name (or undefined for an anonymous class).
    if (lit->raw_name() != NULL) {
      __ Push(lit->name());
    } else {
      __ Push(isolate()->factory()->undefined_value());
    }

    // Superclass expression (or the hole when there is no 'extends').
    if (lit->extends() != NULL) {
      VisitForStackValue(lit->extends());
    } else {
      __ Push(isolate()->factory()->the_hole_value());
    }

    VisitForStackValue(lit->constructor());

    __ Push(script());
    __ Push(Smi::FromInt(lit->start_position()));
    __ Push(Smi::FromInt(lit->end_position()));

    // Six arguments were pushed above.
    __ CallRuntime(is_strong(language_mode()) ? Runtime::kDefineClassStrong
                                              : Runtime::kDefineClass,
                   6);
    PrepareForBailoutForId(lit->CreateLiteralId(), TOS_REG);

    int store_slot_index = 0;
    EmitClassDefineProperties(lit, &store_slot_index);

    if (lit->scope() != NULL) {
      DCHECK_NOT_NULL(lit->class_variable_proxy());
      FeedbackVectorICSlot slot = FLAG_vector_stores
                                      ? lit->GetNthSlot(store_slot_index++)
                                      : FeedbackVectorICSlot::Invalid();
      EmitVariableAssignment(lit->class_variable_proxy()->var(),
                             Token::INIT_CONST, slot);
    }

    // Verify that compilation exactly consumed the number of store ic slots
    // that the ClassLiteral node had to offer.
    DCHECK(!FLAG_vector_stores || store_slot_index == lit->slot_count());
  }

  context()->Plug(result_register());
}
| 1327 | |
| 1328 | |
// Compiles a native function literal: instantiates the function from the
// extension's FunctionTemplate, builds a SharedFunctionInfo that mirrors
// the instantiated function (code, stubs, scope info, feedback vector,
// function data, parameter count), and emits a closure for it.
void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate());

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Local<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(v8_isolate,
                                                   v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(
      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
           .ToLocalChecked());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(
          name, literals, FunctionKind::kNormalFunction, code,
          Handle<ScopeInfo>(fun->shared()->scope_info()),
          Handle<TypeFeedbackVector>(fun->shared()->feedback_vector()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->internal_formal_parameter_count();
  shared->set_internal_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}
| 1363 | |
| 1364 | |
// Compiles a throw expression: evaluates the exception onto the stack and
// calls the Throw runtime function, which does not return.
void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  SetExpressionPosition(expr);
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}
| 1372 | |
| 1373 | |
// Begins a try block: fills in the handler-table entry (range start, handler
// offset, try/catch depth, expression-stack depth at entry) and pushes the
// current context so the handler can restore it.
void FullCodeGenerator::EnterTryBlock(int handler_index, Label* handler) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_start = masm()->pc_offset();
  entry->handler_offset = handler->pos();
  entry->try_catch_depth = try_catch_depth_;

  // Determine expression stack depth of try statement.
  int stack_depth = info_->scope()->num_stack_slots();  // Include stack locals.
  for (NestedStatement* current = nesting_stack_; current != NULL; /*nop*/) {
    current = current->AccumulateDepth(&stack_depth);
  }
  entry->stack_depth = stack_depth;

  // Push context onto operand stack.
  STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
  __ Push(context_register());
}
| 1391 | |
| 1392 | |
// Ends a try block: records the end of the protected code range in the
// handler table and drops the context that EnterTryBlock pushed.
void FullCodeGenerator::ExitTryBlock(int handler_index) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_end = masm()->pc_offset();

  // Drop context from operand stack.
  __ Drop(TryBlockConstant::kElementCount);
}
| 1400 | |
| 1401 | |
// Spread expressions are handled elsewhere and must never reach this visitor.
void FullCodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }
| 1403 | |
| 1404 | |
// Emits the code to leave a try-finally on a local control-flow transfer
// (break/continue/return): unwinds to the handler block, restores (or
// drops) the saved context, and calls the finally block.  Resets the
// accumulated stack depth and context length since the unwind consumed
// them, and returns the next-outer nested statement.
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth, int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  if (*context_length > 0) {
    __ Drop(*stack_depth);  // Down to the handler block.
    // Restore the context to its dedicated register and the stack.
    STATIC_ASSERT(TryFinally::kElementCount == 1);
    __ Pop(codegen_->context_register());
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  } else {
    // Down to the handler block and also drop context.
    __ Drop(*stack_depth + kElementCount);
  }
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
| 1430 | |
| 1431 | |
| 1432 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) { | |
| 1433 Expression* sub_expr; | |
| 1434 Handle<String> check; | |
| 1435 if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) { | |
| 1436 EmitLiteralCompareTypeof(expr, sub_expr, check); | |
| 1437 return true; | |
| 1438 } | |
| 1439 | |
| 1440 if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) { | |
| 1441 EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue); | |
| 1442 return true; | |
| 1443 } | |
| 1444 | |
| 1445 if (expr->IsLiteralCompareNull(&sub_expr)) { | |
| 1446 EmitLiteralCompareNil(expr, sub_expr, kNullValue); | |
| 1447 return true; | |
| 1448 } | |
| 1449 | |
| 1450 return false; | |
| 1451 } | |
| 1452 | |
| 1453 | |
// Patches the back edges of |unoptimized| so that loops at the next nesting
// level trigger on-stack replacement instead of a plain interrupt check,
// then records the new allowed OSR nesting level on the code object.
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment loop nesting level by one and iterate over the back edge table
  // to find the matching loops to patch the interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      // Only unpatched (INTERRUPT) edges at exactly this level are touched.
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}
| 1477 | |
| 1478 | |
// Reverts all previously patched back edges of |unoptimized| to plain
// interrupt checks and resets the allowed OSR nesting level to zero.
void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    // Edges at or below the current level were patched by Patch() above.
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}
| 1500 | |
| 1501 | |
// Patches the back edge at |pc_offset| to call the OsrAfterStackCheck
// builtin.
void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}
| 1509 | |
| 1510 | |
// Undoes AddStackCheck for the back edge at |pc_offset|: if it is currently
// in the OSR_AFTER_STACK_CHECK state, patch it back to plain on-stack
// replacement.  Any other state is left untouched.
void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}
| 1521 | |
| 1522 | |
#ifdef DEBUG
// Debug-only consistency check: every back edge at a loop depth at or below
// the code's allowed OSR nesting level must be patched (non-INTERRUPT), and
// every deeper edge must still be an interrupt check.
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG
| 1541 | |
| 1542 | |
// RAII helper that enters a block scope on construction.  When |scope| is
// non-NULL it becomes the generator's current scope, a block context is
// pushed if the scope allocates any context locals, and the scope's
// declarations are visited.  Bailout points are recorded at |entry_id| and
// |declarations_id|; the matching exit work happens in the destructor.
FullCodeGenerator::EnterBlockScopeIfNeeded::EnterBlockScopeIfNeeded(
    FullCodeGenerator* codegen, Scope* scope, BailoutId entry_id,
    BailoutId declarations_id, BailoutId exit_id)
    : codegen_(codegen), exit_id_(exit_id) {
  saved_scope_ = codegen_->scope();

  if (scope == NULL) {
    // No scope at all: just record the entry bailout point.
    codegen_->PrepareForBailoutForId(entry_id, NO_REGISTERS);
    needs_block_context_ = false;
  } else {
    // A context is only needed if the scope has context-allocated locals.
    needs_block_context_ = scope->ContextLocalCount() > 0;
    codegen_->scope_ = scope;
    {
      if (needs_block_context_) {
        Comment cmnt(masm(), "[ Extend block context");
        __ Push(scope->GetScopeInfo(codegen->isolate()));
        codegen_->PushFunctionArgumentForContextAllocation();
        __ CallRuntime(Runtime::kPushBlockContext, 2);

        // Replace the context stored in the frame.
        codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                    codegen_->context_register());
      }
      CHECK_EQ(0, scope->num_stack_slots());
      codegen_->PrepareForBailoutForId(entry_id, NO_REGISTERS);
    }
    {
      Comment cmnt(masm(), "[ Declarations");
      codegen_->VisitDeclarations(scope->declarations());
      codegen_->PrepareForBailoutForId(declarations_id, NO_REGISTERS);
    }
  }
}
| 1576 | |
| 1577 | |
// Leaves the block scope entered by the constructor: pops the block context
// (if one was pushed), records the exit bailout point, and restores the
// generator's previous scope.
FullCodeGenerator::EnterBlockScopeIfNeeded::~EnterBlockScopeIfNeeded() {
  if (needs_block_context_) {
    codegen_->LoadContextField(codegen_->context_register(),
                               Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  }
  codegen_->PrepareForBailoutForId(exit_id_, NO_REGISTERS);
  codegen_->scope_ = saved_scope_;
}
| 1589 | |
| 1590 | |
| 1591 #undef __ | |
| 1592 | |
| 1593 | |
| 1594 } // namespace internal | |
| 1595 } // namespace v8 | |
| OLD | NEW |