 Chromium Code Reviews
 Chromium Code Reviews Issue 8492004:
  Fix lazy deoptimization at HInvokeFunction and enable target-recording call-function stub.  (Closed) 
  Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
    
  
    Issue 8492004:
  Fix lazy deoptimization at HInvokeFunction and enable target-recording call-function stub.  (Closed) 
  Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright | 
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. | 
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above | 
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following | 
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided | 
| (...skipping 26 matching lines...) Expand all Loading... | |
| 37 namespace v8 { | 37 namespace v8 { | 
| 38 namespace internal { | 38 namespace internal { | 
| 39 | 39 | 
| 40 | 40 | 
| 41 // When invoking builtins, we need to record the safepoint in the middle of | 41 // When invoking builtins, we need to record the safepoint in the middle of | 
| 42 // the invoke instruction sequence generated by the macro assembler. | 42 // the invoke instruction sequence generated by the macro assembler. | 
| 43 class SafepointGenerator : public CallWrapper { | 43 class SafepointGenerator : public CallWrapper { | 
| 44 public: | 44 public: | 
| 45 SafepointGenerator(LCodeGen* codegen, | 45 SafepointGenerator(LCodeGen* codegen, | 
| 46 LPointerMap* pointers, | 46 LPointerMap* pointers, | 
| 47 int deoptimization_index) | 47 Safepoint::DeoptMode mode) | 
| 48 : codegen_(codegen), | 48 : codegen_(codegen), | 
| 49 pointers_(pointers), | 49 pointers_(pointers), | 
| 50 deoptimization_index_(deoptimization_index) {} | 50 deopt_mode_(mode) {} | 
| 51 virtual ~SafepointGenerator() { } | 51 virtual ~SafepointGenerator() { } | 
| 52 | 52 | 
| 53 virtual void BeforeCall(int call_size) const {} | 53 virtual void BeforeCall(int call_size) const {} | 
| 54 | 54 | 
| 55 virtual void AfterCall() const { | 55 virtual void AfterCall() const { | 
| 56 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 56 codegen_->RecordSafepoint(pointers_, deopt_mode_); | 
| 57 } | 57 } | 
| 58 | 58 | 
| 59 private: | 59 private: | 
| 60 LCodeGen* codegen_; | 60 LCodeGen* codegen_; | 
| 61 LPointerMap* pointers_; | 61 LPointerMap* pointers_; | 
| 62 int deoptimization_index_; | 62 Safepoint::DeoptMode deopt_mode_; | 
| 63 }; | 63 }; | 
| 64 | 64 | 
| 65 | 65 | 
| 66 #define __ masm()-> | 66 #define __ masm()-> | 
| 67 | 67 | 
| 68 bool LCodeGen::GenerateCode() { | 68 bool LCodeGen::GenerateCode() { | 
| 69 HPhase phase("Code generation", chunk()); | 69 HPhase phase("Code generation", chunk()); | 
| 70 ASSERT(is_unused()); | 70 ASSERT(is_unused()); | 
| 71 status_ = GENERATING; | 71 status_ = GENERATING; | 
| 72 CpuFeatures::Scope scope(SSE2); | 72 CpuFeatures::Scope scope(SSE2); | 
| (...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 214 if (heap_slots > 0) { | 214 if (heap_slots > 0) { | 
| 215 Comment(";;; Allocate local context"); | 215 Comment(";;; Allocate local context"); | 
| 216 // Argument to NewContext is the function, which is still in edi. | 216 // Argument to NewContext is the function, which is still in edi. | 
| 217 __ push(edi); | 217 __ push(edi); | 
| 218 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 218 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 
| 219 FastNewContextStub stub(heap_slots); | 219 FastNewContextStub stub(heap_slots); | 
| 220 __ CallStub(&stub); | 220 __ CallStub(&stub); | 
| 221 } else { | 221 } else { | 
| 222 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 222 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 
| 223 } | 223 } | 
| 224 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); | 224 RecordSafepoint(Safepoint::kNoDeopt); | 
| 225 // Context is returned in both eax and esi. It replaces the context | 225 // Context is returned in both eax and esi. It replaces the context | 
| 226 // passed to us. It's saved in the stack and kept live in esi. | 226 // passed to us. It's saved in the stack and kept live in esi. | 
| 227 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); | 227 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); | 
| 228 | 228 | 
| 229 // Copy parameters into context if necessary. | 229 // Copy parameters into context if necessary. | 
| 230 int num_parameters = scope()->num_parameters(); | 230 int num_parameters = scope()->num_parameters(); | 
| 231 for (int i = 0; i < num_parameters; i++) { | 231 for (int i = 0; i < num_parameters; i++) { | 
| 232 Variable* var = scope()->parameter(i); | 232 Variable* var = scope()->parameter(i); | 
| 233 if (var->IsContextSlot()) { | 233 if (var->IsContextSlot()) { | 
| 234 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 234 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 273 | 273 | 
| 274 if (emit_instructions) { | 274 if (emit_instructions) { | 
| 275 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 275 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 
| 276 instr->CompileToNative(this); | 276 instr->CompileToNative(this); | 
| 277 } | 277 } | 
| 278 } | 278 } | 
| 279 return !is_aborted(); | 279 return !is_aborted(); | 
| 280 } | 280 } | 
| 281 | 281 | 
| 282 | 282 | 
| 283 LInstruction* LCodeGen::GetNextInstruction() { | |
| 284 if (current_instruction_ < instructions_->length() - 1) { | |
| 285 return instructions_->at(current_instruction_ + 1); | |
| 286 } else { | |
| 287 return NULL; | |
| 288 } | |
| 289 } | |
| 290 | |
| 291 | |
| 292 bool LCodeGen::GenerateDeferredCode() { | 283 bool LCodeGen::GenerateDeferredCode() { | 
| 293 ASSERT(is_generating()); | 284 ASSERT(is_generating()); | 
| 294 if (deferred_.length() > 0) { | 285 if (deferred_.length() > 0) { | 
| 295 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 286 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 
| 296 LDeferredCode* code = deferred_[i]; | 287 LDeferredCode* code = deferred_[i]; | 
| 297 __ bind(code->entry()); | 288 __ bind(code->entry()); | 
| 298 Comment(";;; Deferred code @%d: %s.", | 289 Comment(";;; Deferred code @%d: %s.", | 
| 299 code->instruction_index(), | 290 code->instruction_index(), | 
| 300 code->instr()->Mnemonic()); | 291 code->instr()->Mnemonic()); | 
| 301 code->Generate(); | 292 code->Generate(); | 
| 302 __ jmp(code->exit()); | 293 __ jmp(code->exit()); | 
| 303 } | 294 } | 
| 304 | |
| 305 // Pad code to ensure that the last piece of deferred code have | |
| 306 // room for lazy bailout. | |
| 307 while ((masm()->pc_offset() - LastSafepointEnd()) | |
| 308 < Deoptimizer::patch_size()) { | |
| 309 __ nop(); | |
| 310 } | |
| 311 } | 295 } | 
| 312 | 296 | 
| 313 // Deferred code is the last part of the instruction sequence. Mark | 297 // Deferred code is the last part of the instruction sequence. Mark | 
| 314 // the generated code as done unless we bailed out. | 298 // the generated code as done unless we bailed out. | 
| 315 if (!is_aborted()) status_ = DONE; | 299 if (!is_aborted()) status_ = DONE; | 
| 316 return !is_aborted(); | 300 return !is_aborted(); | 
| 317 } | 301 } | 
| 318 | 302 | 
| 319 | 303 | 
| 320 bool LCodeGen::GenerateSafepointTable() { | 304 bool LCodeGen::GenerateSafepointTable() { | 
| (...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 482 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 466 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 
| 483 RelocInfo::Mode mode, | 467 RelocInfo::Mode mode, | 
| 484 LInstruction* instr, | 468 LInstruction* instr, | 
| 485 SafepointMode safepoint_mode) { | 469 SafepointMode safepoint_mode) { | 
| 486 ASSERT(instr != NULL); | 470 ASSERT(instr != NULL); | 
| 487 LPointerMap* pointers = instr->pointer_map(); | 471 LPointerMap* pointers = instr->pointer_map(); | 
| 488 RecordPosition(pointers->position()); | 472 RecordPosition(pointers->position()); | 
| 489 | 473 | 
| 490 __ call(code, mode); | 474 __ call(code, mode); | 
| 491 | 475 | 
| 492 RegisterLazyDeoptimization(instr, safepoint_mode); | 476 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 
| 493 | 477 | 
| 494 // Signal that we don't inline smi code before these stubs in the | 478 // Signal that we don't inline smi code before these stubs in the | 
| 495 // optimizing code generator. | 479 // optimizing code generator. | 
| 496 if (code->kind() == Code::BINARY_OP_IC || | 480 if (code->kind() == Code::BINARY_OP_IC || | 
| 497 code->kind() == Code::COMPARE_IC) { | 481 code->kind() == Code::COMPARE_IC) { | 
| 498 __ nop(); | 482 __ nop(); | 
| 499 } | 483 } | 
| 500 } | 484 } | 
| 501 | 485 | 
| 502 | 486 | 
| 503 void LCodeGen::CallCode(Handle<Code> code, | 487 void LCodeGen::CallCode(Handle<Code> code, | 
| 504 RelocInfo::Mode mode, | 488 RelocInfo::Mode mode, | 
| 505 LInstruction* instr) { | 489 LInstruction* instr) { | 
| 506 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 490 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 
| 507 } | 491 } | 
| 508 | 492 | 
| 509 | 493 | 
| 510 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 494 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 
| 511 int argc, | 495 int argc, | 
| 512 LInstruction* instr) { | 496 LInstruction* instr) { | 
| 513 ASSERT(instr != NULL); | 497 ASSERT(instr != NULL); | 
| 514 ASSERT(instr->HasPointerMap()); | 498 ASSERT(instr->HasPointerMap()); | 
| 515 LPointerMap* pointers = instr->pointer_map(); | 499 LPointerMap* pointers = instr->pointer_map(); | 
| 516 RecordPosition(pointers->position()); | 500 RecordPosition(pointers->position()); | 
| 517 | 501 | 
| 518 __ CallRuntime(fun, argc); | 502 __ CallRuntime(fun, argc); | 
| 519 | 503 | 
| 520 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | 504 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 
| 521 } | 505 } | 
| 522 | 506 | 
| 523 | 507 | 
| 524 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 508 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 
| 525 int argc, | 509 int argc, | 
| 526 LInstruction* instr, | 510 LInstruction* instr, | 
| 527 LOperand* context) { | 511 LOperand* context) { | 
| 528 if (context->IsRegister()) { | 512 if (context->IsRegister()) { | 
| 529 if (!ToRegister(context).is(esi)) { | 513 if (!ToRegister(context).is(esi)) { | 
| 530 __ mov(esi, ToRegister(context)); | 514 __ mov(esi, ToRegister(context)); | 
| 531 } | 515 } | 
| 532 } else if (context->IsStackSlot()) { | 516 } else if (context->IsStackSlot()) { | 
| 533 __ mov(esi, ToOperand(context)); | 517 __ mov(esi, ToOperand(context)); | 
| 534 } else if (context->IsConstantOperand()) { | 518 } else if (context->IsConstantOperand()) { | 
| 535 Handle<Object> literal = | 519 Handle<Object> literal = | 
| 536 chunk_->LookupLiteral(LConstantOperand::cast(context)); | 520 chunk_->LookupLiteral(LConstantOperand::cast(context)); | 
| 537 LoadHeapObject(esi, Handle<Context>::cast(literal)); | 521 LoadHeapObject(esi, Handle<Context>::cast(literal)); | 
| 538 } else { | 522 } else { | 
| 539 UNREACHABLE(); | 523 UNREACHABLE(); | 
| 540 } | 524 } | 
| 541 | 525 | 
| 542 __ CallRuntimeSaveDoubles(id); | 526 __ CallRuntimeSaveDoubles(id); | 
| 543 RecordSafepointWithRegisters( | 527 RecordSafepointWithRegisters( | 
| 544 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | 528 instr->pointer_map(), argc, Safepoint::kNoDeopt); | 
| 545 } | 529 } | 
| 546 | 530 | 
| 547 | 531 | 
| 548 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | 532 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, | 
| 549 SafepointMode safepoint_mode) { | 533 bool is_lazy) { | 
| 550 // Create the environment to bailout to. If the call has side effects | |
| 551 // execution has to continue after the call otherwise execution can continue | |
| 552 // from a previous bailout point repeating the call. | |
| 553 LEnvironment* deoptimization_environment; | |
| 554 if (instr->HasDeoptimizationEnvironment()) { | |
| 555 deoptimization_environment = instr->deoptimization_environment(); | |
| 556 } else { | |
| 557 deoptimization_environment = instr->environment(); | |
| 558 } | |
| 559 | |
| 560 RegisterEnvironmentForDeoptimization(deoptimization_environment); | |
| 561 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
| 562 RecordSafepoint(instr->pointer_map(), | |
| 563 deoptimization_environment->deoptimization_index()); | |
| 564 } else { | |
| 565 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
| 566 RecordSafepointWithRegisters( | |
| 567 instr->pointer_map(), | |
| 568 0, | |
| 569 deoptimization_environment->deoptimization_index()); | |
| 570 } | |
| 571 } | |
| 572 | |
| 573 | |
| 574 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | |
| 575 if (!environment->HasBeenRegistered()) { | 534 if (!environment->HasBeenRegistered()) { | 
| 576 // Physical stack frame layout: | 535 // Physical stack frame layout: | 
| 577 // -x ............. -4 0 ..................................... y | 536 // -x ............. -4 0 ..................................... y | 
| 578 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 537 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 
| 579 | 538 | 
| 580 // Layout of the environment: | 539 // Layout of the environment: | 
| 581 // 0 ..................................................... size-1 | 540 // 0 ..................................................... size-1 | 
| 582 // [parameters] [locals] [expression stack including arguments] | 541 // [parameters] [locals] [expression stack including arguments] | 
| 583 | 542 | 
| 584 // Layout of the translation: | 543 // Layout of the translation: | 
| 585 // 0 ........................................................ size - 1 + 4 | 544 // 0 ........................................................ size - 1 + 4 | 
| 586 // [expression stack including arguments] [locals] [4 words] [parameters] | 545 // [expression stack including arguments] [locals] [4 words] [parameters] | 
| 587 // |>------------ translation_size ------------<| | 546 // |>------------ translation_size ------------<| | 
| 588 | 547 | 
| 589 int frame_count = 0; | 548 int frame_count = 0; | 
| 590 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 549 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 
| 591 ++frame_count; | 550 ++frame_count; | 
| 592 } | 551 } | 
| 593 Translation translation(&translations_, frame_count); | 552 Translation translation(&translations_, frame_count); | 
| 594 WriteTranslation(environment, &translation); | 553 WriteTranslation(environment, &translation); | 
| 595 int deoptimization_index = deoptimizations_.length(); | 554 int deoptimization_index = deoptimizations_.length(); | 
| 596 environment->Register(deoptimization_index, translation.index()); | 555 int pc_offset = masm()->pc_offset(); | 
| 556 environment->Register(deoptimization_index, | |
| 557 translation.index(), | |
| 558 is_lazy ? pc_offset : -1); | |
| 597 deoptimizations_.Add(environment); | 559 deoptimizations_.Add(environment); | 
| 598 } | 560 } | 
| 599 } | 561 } | 
| 600 | 562 | 
| 601 | 563 | 
| 602 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 564 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 
| 603 RegisterEnvironmentForDeoptimization(environment); | 565 RegisterEnvironmentForDeoptimization(environment, false); // Not lazy. | 
| 604 ASSERT(environment->HasBeenRegistered()); | 566 ASSERT(environment->HasBeenRegistered()); | 
| 605 int id = environment->deoptimization_index(); | 567 int id = environment->deoptimization_index(); | 
| 606 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 568 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 
| 607 ASSERT(entry != NULL); | 569 ASSERT(entry != NULL); | 
| 608 if (entry == NULL) { | 570 if (entry == NULL) { | 
| 609 Abort("bailout was not prepared"); | 571 Abort("bailout was not prepared"); | 
| 610 return; | 572 return; | 
| 611 } | 573 } | 
| 612 | 574 | 
| 613 if (FLAG_deopt_every_n_times != 0) { | 575 if (FLAG_deopt_every_n_times != 0) { | 
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 673 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 635 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 
| 674 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 636 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 
| 675 | 637 | 
| 676 // Populate the deoptimization entries. | 638 // Populate the deoptimization entries. | 
| 677 for (int i = 0; i < length; i++) { | 639 for (int i = 0; i < length; i++) { | 
| 678 LEnvironment* env = deoptimizations_[i]; | 640 LEnvironment* env = deoptimizations_[i]; | 
| 679 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 641 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 
| 680 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 642 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 
| 681 data->SetArgumentsStackHeight(i, | 643 data->SetArgumentsStackHeight(i, | 
| 682 Smi::FromInt(env->arguments_stack_height())); | 644 Smi::FromInt(env->arguments_stack_height())); | 
| 645 data->SetPc(i, Smi::FromInt(env->pc_offset())); | |
| 683 } | 646 } | 
| 684 code->set_deoptimization_data(*data); | 647 code->set_deoptimization_data(*data); | 
| 685 } | 648 } | 
| 686 | 649 | 
| 687 | 650 | 
| 688 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 651 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 
| 689 int result = deoptimization_literals_.length(); | 652 int result = deoptimization_literals_.length(); | 
| 690 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 653 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 
| 691 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 654 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 
| 692 } | 655 } | 
| (...skipping 11 matching lines...) Expand all Loading... | |
| 704 for (int i = 0, length = inlined_closures->length(); | 667 for (int i = 0, length = inlined_closures->length(); | 
| 705 i < length; | 668 i < length; | 
| 706 i++) { | 669 i++) { | 
| 707 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 670 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 
| 708 } | 671 } | 
| 709 | 672 | 
| 710 inlined_function_count_ = deoptimization_literals_.length(); | 673 inlined_function_count_ = deoptimization_literals_.length(); | 
| 711 } | 674 } | 
| 712 | 675 | 
| 713 | 676 | 
| 677 void LCodeGen::RecordSafepointWithLazyDeopt( | |
| 678 LInstruction* instr, SafepointMode safepoint_mode) { | |
| 679 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
| 680 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); | |
| 681 } else { | |
| 682 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
| 683 RecordSafepointWithRegisters( | |
| 684 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | |
| 685 } | |
| 686 } | |
| 687 | |
| 688 | |
| 714 void LCodeGen::RecordSafepoint( | 689 void LCodeGen::RecordSafepoint( | 
| 715 LPointerMap* pointers, | 690 LPointerMap* pointers, | 
| 716 Safepoint::Kind kind, | 691 Safepoint::Kind kind, | 
| 717 int arguments, | 692 int arguments, | 
| 718 int deoptimization_index) { | 693 Safepoint::DeoptMode deopt_mode) { | 
| 719 ASSERT(kind == expected_safepoint_kind_); | 694 ASSERT(kind == expected_safepoint_kind_); | 
| 720 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 695 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 
| 721 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 696 Safepoint safepoint = | 
| 722 kind, arguments, deoptimization_index); | 697 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode); | 
| 723 for (int i = 0; i < operands->length(); i++) { | 698 for (int i = 0; i < operands->length(); i++) { | 
| 724 LOperand* pointer = operands->at(i); | 699 LOperand* pointer = operands->at(i); | 
| 725 if (pointer->IsStackSlot()) { | 700 if (pointer->IsStackSlot()) { | 
| 726 safepoint.DefinePointerSlot(pointer->index()); | 701 safepoint.DefinePointerSlot(pointer->index()); | 
| 727 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 702 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 
| 728 safepoint.DefinePointerRegister(ToRegister(pointer)); | 703 safepoint.DefinePointerRegister(ToRegister(pointer)); | 
| 729 } | 704 } | 
| 730 } | 705 } | 
| 731 } | 706 } | 
| 732 | 707 | 
| 733 | 708 | 
| 734 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 709 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 
| 735 int deoptimization_index) { | 710 Safepoint::DeoptMode mode) { | 
| 736 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 711 RecordSafepoint(pointers, Safepoint::kSimple, 0, mode); | 
| 737 } | 712 } | 
| 738 | 713 | 
| 739 | 714 | 
| 740 void LCodeGen::RecordSafepoint(int deoptimization_index) { | 715 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) { | 
| 741 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 716 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 
| 742 RecordSafepoint(&empty_pointers, deoptimization_index); | 717 RecordSafepoint(&empty_pointers, mode); | 
| 743 } | 718 } | 
| 744 | 719 | 
| 745 | 720 | 
| 746 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 721 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 
| 747 int arguments, | 722 int arguments, | 
| 748 int deoptimization_index) { | 723 Safepoint::DeoptMode mode) { | 
| 749 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 724 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode); | 
| 750 deoptimization_index); | |
| 751 } | 725 } | 
| 752 | 726 | 
| 753 | 727 | 
| 754 void LCodeGen::RecordPosition(int position) { | 728 void LCodeGen::RecordPosition(int position) { | 
| 755 if (position == RelocInfo::kNoPosition) return; | 729 if (position == RelocInfo::kNoPosition) return; | 
| 756 masm()->positions_recorder()->RecordPosition(position); | 730 masm()->positions_recorder()->RecordPosition(position); | 
| 757 } | 731 } | 
| 758 | 732 | 
| 759 | 733 | 
| 760 void LCodeGen::DoLabel(LLabel* label) { | 734 void LCodeGen::DoLabel(LLabel* label) { | 
| (...skipping 14 matching lines...) Expand all Loading... | |
| 775 | 749 | 
| 776 | 750 | 
| 777 void LCodeGen::DoGap(LGap* gap) { | 751 void LCodeGen::DoGap(LGap* gap) { | 
| 778 for (int i = LGap::FIRST_INNER_POSITION; | 752 for (int i = LGap::FIRST_INNER_POSITION; | 
| 779 i <= LGap::LAST_INNER_POSITION; | 753 i <= LGap::LAST_INNER_POSITION; | 
| 780 i++) { | 754 i++) { | 
| 781 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 755 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 
| 782 LParallelMove* move = gap->GetParallelMove(inner_pos); | 756 LParallelMove* move = gap->GetParallelMove(inner_pos); | 
| 783 if (move != NULL) DoParallelMove(move); | 757 if (move != NULL) DoParallelMove(move); | 
| 784 } | 758 } | 
| 785 | |
| 786 LInstruction* next = GetNextInstruction(); | |
| 787 if (next != NULL && next->IsLazyBailout()) { | |
| 788 int pc = masm()->pc_offset(); | |
| 789 safepoints_.SetPcAfterGap(pc); | |
| 790 } | |
| 791 } | 759 } | 
| 792 | 760 | 
| 793 | 761 | 
| 794 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 762 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 
| 795 DoGap(instr); | 763 DoGap(instr); | 
| 796 } | 764 } | 
| 797 | 765 | 
| 798 | 766 | 
| 799 void LCodeGen::DoParameter(LParameter* instr) { | 767 void LCodeGen::DoParameter(LParameter* instr) { | 
| 800 // Nothing to do. | 768 // Nothing to do. | 
| (...skipping 1120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1921 } | 1889 } | 
| 1922 | 1890 | 
| 1923 | 1891 | 
| 1924 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1892 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 
| 1925 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 1893 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 
| 1926 public: | 1894 public: | 
| 1927 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 1895 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 
| 1928 LInstanceOfKnownGlobal* instr) | 1896 LInstanceOfKnownGlobal* instr) | 
| 1929 : LDeferredCode(codegen), instr_(instr) { } | 1897 : LDeferredCode(codegen), instr_(instr) { } | 
| 1930 virtual void Generate() { | 1898 virtual void Generate() { | 
| 1931 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); | 1899 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | 
| 1932 } | 1900 } | 
| 1933 virtual LInstruction* instr() { return instr_; } | 1901 virtual LInstruction* instr() { return instr_; } | 
| 1934 Label* map_check() { return &map_check_; } | 1902 Label* map_check() { return &map_check_; } | 
| 1935 private: | 1903 private: | 
| 1936 LInstanceOfKnownGlobal* instr_; | 1904 LInstanceOfKnownGlobal* instr_; | 
| 1937 Label map_check_; | 1905 Label map_check_; | 
| 1938 }; | 1906 }; | 
| 1939 | 1907 | 
| 1940 DeferredInstanceOfKnownGlobal* deferred; | 1908 DeferredInstanceOfKnownGlobal* deferred; | 
| 1941 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 1909 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1976 __ bind(&false_result); | 1944 __ bind(&false_result); | 
| 1977 __ mov(ToRegister(instr->result()), factory()->false_value()); | 1945 __ mov(ToRegister(instr->result()), factory()->false_value()); | 
| 1978 | 1946 | 
| 1979 // Here result has either true or false. Deferred code also produces true or | 1947 // Here result has either true or false. Deferred code also produces true or | 
| 1980 // false object. | 1948 // false object. | 
| 1981 __ bind(deferred->exit()); | 1949 __ bind(deferred->exit()); | 
| 1982 __ bind(&done); | 1950 __ bind(&done); | 
| 1983 } | 1951 } | 
| 1984 | 1952 | 
| 1985 | 1953 | 
| 1986 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 1954 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 
| 1987 Label* map_check) { | 1955 Label* map_check) { | 
| 1988 PushSafepointRegistersScope scope(this); | 1956 PushSafepointRegistersScope scope(this); | 
| 1989 | 1957 | 
| 1990 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 1958 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 
| 1991 flags = static_cast<InstanceofStub::Flags>( | 1959 flags = static_cast<InstanceofStub::Flags>( | 
| 1992 flags | InstanceofStub::kArgsInRegisters); | 1960 flags | InstanceofStub::kArgsInRegisters); | 
| 1993 flags = static_cast<InstanceofStub::Flags>( | 1961 flags = static_cast<InstanceofStub::Flags>( | 
| 1994 flags | InstanceofStub::kCallSiteInlineCheck); | 1962 flags | InstanceofStub::kCallSiteInlineCheck); | 
| 1995 flags = static_cast<InstanceofStub::Flags>( | 1963 flags = static_cast<InstanceofStub::Flags>( | 
| 1996 flags | InstanceofStub::kReturnTrueFalseObject); | 1964 flags | InstanceofStub::kReturnTrueFalseObject); | 
| 1997 InstanceofStub stub(flags); | 1965 InstanceofStub stub(flags); | 
| 1998 | 1966 | 
| 1999 // Get the temp register reserved by the instruction. This needs to be a | 1967 // Get the temp register reserved by the instruction. This needs to be a | 
| 2000 // register which is pushed last by PushSafepointRegisters as top of the | 1968 // register which is pushed last by PushSafepointRegisters as top of the | 
| 2001 // stack is used to pass the offset to the location of the map check to | 1969 // stack is used to pass the offset to the location of the map check to | 
| 2002 // the stub. | 1970 // the stub. | 
| 2003 Register temp = ToRegister(instr->TempAt(0)); | 1971 Register temp = ToRegister(instr->TempAt(0)); | 
| 2004 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | 1972 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | 
| 2005 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 1973 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 
| 2006 static const int kAdditionalDelta = 13; | 1974 static const int kAdditionalDelta = 13; | 
| 2007 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1975 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 
| 2008 __ mov(temp, Immediate(delta)); | 1976 __ mov(temp, Immediate(delta)); | 
| 2009 __ StoreToSafepointRegisterSlot(temp, temp); | 1977 __ StoreToSafepointRegisterSlot(temp, temp); | 
| 2010 CallCodeGeneric(stub.GetCode(), | 1978 RecordPosition(instr->pointer_map()->position()); | 
| 
Vyacheslav Egorov (Chromium)
2011/11/10 15:50:54
I think this can be rolled back into CallCodeGener
 
fschneider
2011/11/11 14:09:08
Correct. Done.
 | |
| 2011 RelocInfo::CODE_TARGET, | 1979 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); | 
| 2012 instr, | 1980 | 
| 2013 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 1981 RecordSafepointWithLazyDeopt( | 
| 1982 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
| 1983 ASSERT(instr->HasDeoptimizationEnvironment()); | |
| 1984 LEnvironment* env = instr->deoptimization_environment(); | |
| 1985 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 1986 | |
| 2014 // Put the result value into the eax slot and restore all registers. | 1987 // Put the result value into the eax slot and restore all registers. | 
| 2015 __ StoreToSafepointRegisterSlot(eax, eax); | 1988 __ StoreToSafepointRegisterSlot(eax, eax); | 
| 2016 } | 1989 } | 
| 2017 | 1990 | 
| 2018 | 1991 | 
| 2019 static Condition ComputeCompareCondition(Token::Value op) { | 1992 static Condition ComputeCompareCondition(Token::Value op) { | 
| 2020 switch (op) { | 1993 switch (op) { | 
| 2021 case Token::EQ_STRICT: | 1994 case Token::EQ_STRICT: | 
| 2022 case Token::EQ: | 1995 case Token::EQ: | 
| 2023 return equal; | 1996 return equal; | 
| (...skipping 585 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2609 __ j(zero, &invoke, Label::kNear); | 2582 __ j(zero, &invoke, Label::kNear); | 
| 2610 __ bind(&loop); | 2583 __ bind(&loop); | 
| 2611 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2584 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 
| 2612 __ dec(length); | 2585 __ dec(length); | 
| 2613 __ j(not_zero, &loop); | 2586 __ j(not_zero, &loop); | 
| 2614 | 2587 | 
| 2615 // Invoke the function. | 2588 // Invoke the function. | 
| 2616 __ bind(&invoke); | 2589 __ bind(&invoke); | 
| 2617 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2590 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 
| 2618 LPointerMap* pointers = instr->pointer_map(); | 2591 LPointerMap* pointers = instr->pointer_map(); | 
| 2619 LEnvironment* env = instr->deoptimization_environment(); | |
| 2620 RecordPosition(pointers->position()); | 2592 RecordPosition(pointers->position()); | 
| 2621 RegisterEnvironmentForDeoptimization(env); | 2593 SafepointGenerator safepoint_generator( | 
| 2622 SafepointGenerator safepoint_generator(this, | 2594 this, pointers, Safepoint::kLazyDeopt); | 
| 2623 pointers, | |
| 2624 env->deoptimization_index()); | |
| 2625 ParameterCount actual(eax); | 2595 ParameterCount actual(eax); | 
| 2626 __ InvokeFunction(function, actual, CALL_FUNCTION, | 2596 __ InvokeFunction(function, actual, CALL_FUNCTION, | 
| 2627 safepoint_generator, CALL_AS_METHOD); | 2597 safepoint_generator, CALL_AS_METHOD); | 
| 2628 } | 2598 } | 
| 2629 | 2599 | 
| 2630 | 2600 | 
| 2631 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2601 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 
| 2632 LOperand* argument = instr->InputAt(0); | 2602 LOperand* argument = instr->InputAt(0); | 
| 2633 if (argument->IsConstantOperand()) { | 2603 if (argument->IsConstantOperand()) { | 
| 2634 __ push(ToImmediate(argument)); | 2604 __ push(ToImmediate(argument)); | 
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2697 RecordPosition(pointers->position()); | 2667 RecordPosition(pointers->position()); | 
| 2698 | 2668 | 
| 2699 // Invoke function. | 2669 // Invoke function. | 
| 2700 __ SetCallKind(ecx, call_kind); | 2670 __ SetCallKind(ecx, call_kind); | 
| 2701 if (*function == *info()->closure()) { | 2671 if (*function == *info()->closure()) { | 
| 2702 __ CallSelf(); | 2672 __ CallSelf(); | 
| 2703 } else { | 2673 } else { | 
| 2704 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2674 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 
| 2705 } | 2675 } | 
| 2706 | 2676 | 
| 2707 // Setup deoptimization. | 2677 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 
| 2708 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); | |
| 2709 } | 2678 } | 
| 2710 | 2679 | 
| 2711 | 2680 | 
| 2712 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2681 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 
| 2713 ASSERT(ToRegister(instr->result()).is(eax)); | 2682 ASSERT(ToRegister(instr->result()).is(eax)); | 
| 2714 __ mov(edi, instr->function()); | 2683 __ mov(edi, instr->function()); | 
| 2715 CallKnownFunction(instr->function(), | 2684 CallKnownFunction(instr->function(), | 
| 2716 instr->arity(), | 2685 instr->arity(), | 
| 2717 instr, | 2686 instr, | 
| 2718 CALL_AS_METHOD); | 2687 CALL_AS_METHOD); | 
| (...skipping 355 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3074 } | 3043 } | 
| 3075 } | 3044 } | 
| 3076 | 3045 | 
| 3077 | 3046 | 
| 3078 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3047 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 
| 3079 ASSERT(ToRegister(instr->context()).is(esi)); | 3048 ASSERT(ToRegister(instr->context()).is(esi)); | 
| 3080 ASSERT(ToRegister(instr->function()).is(edi)); | 3049 ASSERT(ToRegister(instr->function()).is(edi)); | 
| 3081 ASSERT(instr->HasPointerMap()); | 3050 ASSERT(instr->HasPointerMap()); | 
| 3082 ASSERT(instr->HasDeoptimizationEnvironment()); | 3051 ASSERT(instr->HasDeoptimizationEnvironment()); | 
| 3083 LPointerMap* pointers = instr->pointer_map(); | 3052 LPointerMap* pointers = instr->pointer_map(); | 
| 3084 LEnvironment* env = instr->deoptimization_environment(); | |
| 3085 RecordPosition(pointers->position()); | 3053 RecordPosition(pointers->position()); | 
| 3086 RegisterEnvironmentForDeoptimization(env); | 3054 SafepointGenerator generator( | 
| 3087 SafepointGenerator generator(this, pointers, env->deoptimization_index()); | 3055 this, pointers, Safepoint::kLazyDeopt); | 
| 3088 ParameterCount count(instr->arity()); | 3056 ParameterCount count(instr->arity()); | 
| 3089 __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3057 __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 
| 3090 } | 3058 } | 
| 3091 | 3059 | 
| 3092 | 3060 | 
| 3093 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3061 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 
| 3094 ASSERT(ToRegister(instr->context()).is(esi)); | 3062 ASSERT(ToRegister(instr->context()).is(esi)); | 
| 3095 ASSERT(ToRegister(instr->key()).is(ecx)); | 3063 ASSERT(ToRegister(instr->key()).is(ecx)); | 
| 3096 ASSERT(ToRegister(instr->result()).is(eax)); | 3064 ASSERT(ToRegister(instr->result()).is(eax)); | 
| 3097 | 3065 | 
| (...skipping 548 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3646 // register is stored, as this register is in the pointer map, but contains an | 3614 // register is stored, as this register is in the pointer map, but contains an | 
| 3647 // integer value. | 3615 // integer value. | 
| 3648 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 3616 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 
| 3649 // NumberTagI and NumberTagD use the context from the frame, rather than | 3617 // NumberTagI and NumberTagD use the context from the frame, rather than | 
| 3650 // the environment's HContext or HInlinedContext value. | 3618 // the environment's HContext or HInlinedContext value. | 
| 3651 // They only call Runtime::kAllocateHeapNumber. | 3619 // They only call Runtime::kAllocateHeapNumber. | 
| 3652 // The corresponding HChange instructions are added in a phase that does | 3620 // The corresponding HChange instructions are added in a phase that does | 
| 3653 // not have easy access to the local context. | 3621 // not have easy access to the local context. | 
| 3654 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3622 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 
| 3655 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3623 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 
| 3656 RecordSafepointWithRegisters( | 3624 RecordSafepointWithRegisters(instr->pointer_map(), 0, Safepoint::kNoDeopt); | 
| 3657 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
| 3658 if (!reg.is(eax)) __ mov(reg, eax); | 3625 if (!reg.is(eax)) __ mov(reg, eax); | 
| 3659 | 3626 | 
| 3660 // Done. Put the value in xmm0 into the value of the allocated heap | 3627 // Done. Put the value in xmm0 into the value of the allocated heap | 
| 3661 // number. | 3628 // number. | 
| 3662 __ bind(&done); | 3629 __ bind(&done); | 
| 3663 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); | 3630 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); | 
| 3664 __ StoreToSafepointRegisterSlot(reg, reg); | 3631 __ StoreToSafepointRegisterSlot(reg, reg); | 
| 3665 } | 3632 } | 
| 3666 | 3633 | 
| 3667 | 3634 | 
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3699 __ Set(reg, Immediate(0)); | 3666 __ Set(reg, Immediate(0)); | 
| 3700 | 3667 | 
| 3701 PushSafepointRegistersScope scope(this); | 3668 PushSafepointRegistersScope scope(this); | 
| 3702 // NumberTagI and NumberTagD use the context from the frame, rather than | 3669 // NumberTagI and NumberTagD use the context from the frame, rather than | 
| 3703 // the environment's HContext or HInlinedContext value. | 3670 // the environment's HContext or HInlinedContext value. | 
| 3704 // They only call Runtime::kAllocateHeapNumber. | 3671 // They only call Runtime::kAllocateHeapNumber. | 
| 3705 // The corresponding HChange instructions are added in a phase that does | 3672 // The corresponding HChange instructions are added in a phase that does | 
| 3706 // not have easy access to the local context. | 3673 // not have easy access to the local context. | 
| 3707 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3674 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 
| 3708 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3675 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 
| 3709 RecordSafepointWithRegisters(instr->pointer_map(), 0, | 3676 RecordSafepointWithRegisters(instr->pointer_map(), 0, Safepoint::kNoDeopt); | 
| 3710 Safepoint::kNoDeoptimizationIndex); | |
| 3711 __ StoreToSafepointRegisterSlot(reg, eax); | 3677 __ StoreToSafepointRegisterSlot(reg, eax); | 
| 3712 } | 3678 } | 
| 3713 | 3679 | 
| 3714 | 3680 | 
| 3715 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3681 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 
| 3716 LOperand* input = instr->InputAt(0); | 3682 LOperand* input = instr->InputAt(0); | 
| 3717 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3683 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 
| 3718 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3684 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 
| 3719 __ SmiTag(ToRegister(input)); | 3685 __ SmiTag(ToRegister(input)); | 
| 3720 } | 3686 } | 
| (...skipping 722 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4443 __ j(not_equal, &check_frame_marker, Label::kNear); | 4409 __ j(not_equal, &check_frame_marker, Label::kNear); | 
| 4444 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 4410 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 
| 4445 | 4411 | 
| 4446 // Check the marker in the calling frame. | 4412 // Check the marker in the calling frame. | 
| 4447 __ bind(&check_frame_marker); | 4413 __ bind(&check_frame_marker); | 
| 4448 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 4414 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 
| 4449 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 4415 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 
| 4450 } | 4416 } | 
| 4451 | 4417 | 
| 4452 | 4418 | 
| 4453 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4419 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 
| 
Vyacheslav Egorov (Chromium)
2011/11/10 15:50:54
maybe the instruction should be renamed to LazyDeopt?
 
fschneider
2011/11/11 14:09:08
Yes, I'll do a renaming only change.
 | |
| 4454 // No code for lazy bailout instruction. Used to capture environment after a | 4420 ASSERT(instr->HasEnvironment()); | 
| 4455 // call for populating the safepoint data with deoptimization data. | 4421 LEnvironment* env = instr->environment(); | 
| 4422 RegisterEnvironmentForDeoptimization(env, true); // Lazy deoptimization. | |
| 4423 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 4456 } | 4424 } | 
| 4457 | 4425 | 
| 4458 | 4426 | 
| 4459 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4427 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 
| 4460 DeoptimizeIf(no_condition, instr->environment()); | 4428 DeoptimizeIf(no_condition, instr->environment()); | 
| 4461 } | 4429 } | 
| 4462 | 4430 | 
| 4463 | 4431 | 
| 4464 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 4432 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 
| 4465 LOperand* obj = instr->object(); | 4433 LOperand* obj = instr->object(); | 
| 4466 LOperand* key = instr->key(); | 4434 LOperand* key = instr->key(); | 
| 4467 __ push(ToOperand(obj)); | 4435 __ push(ToOperand(obj)); | 
| 4468 if (key->IsConstantOperand()) { | 4436 if (key->IsConstantOperand()) { | 
| 4469 __ push(ToImmediate(key)); | 4437 __ push(ToImmediate(key)); | 
| 4470 } else { | 4438 } else { | 
| 4471 __ push(ToOperand(key)); | 4439 __ push(ToOperand(key)); | 
| 4472 } | 4440 } | 
| 4473 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4441 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 
| 4474 LPointerMap* pointers = instr->pointer_map(); | 4442 LPointerMap* pointers = instr->pointer_map(); | 
| 4475 LEnvironment* env = instr->deoptimization_environment(); | |
| 4476 RecordPosition(pointers->position()); | 4443 RecordPosition(pointers->position()); | 
| 4477 RegisterEnvironmentForDeoptimization(env); | |
| 4478 // Create safepoint generator that will also ensure enough space in the | 4444 // Create safepoint generator that will also ensure enough space in the | 
| 4479 // reloc info for patching in deoptimization (since this is invoking a | 4445 // reloc info for patching in deoptimization (since this is invoking a | 
| 4480 // builtin) | 4446 // builtin) | 
| 4481 SafepointGenerator safepoint_generator(this, | 4447 SafepointGenerator safepoint_generator( | 
| 4482 pointers, | 4448 this, pointers, Safepoint::kLazyDeopt); | 
| 4483 env->deoptimization_index()); | |
| 4484 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 4449 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 
| 4485 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); | 4450 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); | 
| 4486 } | 4451 } | 
| 4487 | 4452 | 
| 4488 | 4453 | 
| 4489 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 4454 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 
| 4490 { | 4455 PushSafepointRegistersScope scope(this); | 
| 4491 PushSafepointRegistersScope scope(this); | 4456 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 
| 4492 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 4457 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 
| 4493 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 4458 RecordSafepointWithLazyDeopt( | 
| 4494 RegisterLazyDeoptimization( | 4459 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 
| 4495 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4460 ASSERT(instr->HasEnvironment()); | 
| 4496 } | 4461 LEnvironment* env = instr->environment(); | 
| 4497 | 4462 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 
| 4498 // The gap code includes the restoring of the safepoint registers. | |
| 4499 int pc = masm()->pc_offset(); | |
| 4500 safepoints_.SetPcAfterGap(pc); | |
| 4501 } | 4463 } | 
| 4502 | 4464 | 
| 4503 | 4465 | 
| 4504 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4466 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 
| 4505 class DeferredStackCheck: public LDeferredCode { | 4467 class DeferredStackCheck: public LDeferredCode { | 
| 4506 public: | 4468 public: | 
| 4507 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) | 4469 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) | 
| 4508 : LDeferredCode(codegen), instr_(instr) { } | 4470 : LDeferredCode(codegen), instr_(instr) { } | 
| 4509 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 4471 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 
| 4510 virtual LInstruction* instr() { return instr_; } | 4472 virtual LInstruction* instr() { return instr_; } | 
| 4511 private: | 4473 private: | 
| 4512 LStackCheck* instr_; | 4474 LStackCheck* instr_; | 
| 4513 }; | 4475 }; | 
| 4514 | 4476 | 
| 4477 ASSERT(instr->HasEnvironment()); | |
| 4478 LEnvironment* env = instr->environment(); | |
| 4515 if (instr->hydrogen()->is_function_entry()) { | 4479 if (instr->hydrogen()->is_function_entry()) { | 
| 4516 // Perform stack overflow check. | 4480 // Perform stack overflow check. | 
| 4517 Label done; | 4481 Label done; | 
| 4518 ExternalReference stack_limit = | 4482 ExternalReference stack_limit = | 
| 4519 ExternalReference::address_of_stack_limit(isolate()); | 4483 ExternalReference::address_of_stack_limit(isolate()); | 
| 4520 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4484 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 
| 4521 __ j(above_equal, &done, Label::kNear); | 4485 __ j(above_equal, &done, Label::kNear); | 
| 4522 | 4486 | 
| 4523 ASSERT(instr->context()->IsRegister()); | 4487 ASSERT(instr->context()->IsRegister()); | 
| 4524 ASSERT(ToRegister(instr->context()).is(esi)); | 4488 ASSERT(ToRegister(instr->context()).is(esi)); | 
| 4525 StackCheckStub stub; | 4489 StackCheckStub stub; | 
| 4526 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4490 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 
| 4527 __ bind(&done); | 4491 __ bind(&done); | 
| 4492 RegisterEnvironmentForDeoptimization(env, true); // Lazy deoptimization. | |
| 4493 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 4528 } else { | 4494 } else { | 
| 4529 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4495 ASSERT(instr->hydrogen()->is_backwards_branch()); | 
| 4530 // Perform stack overflow check if this goto needs it before jumping. | 4496 // Perform stack overflow check if this goto needs it before jumping. | 
| 4531 DeferredStackCheck* deferred_stack_check = | 4497 DeferredStackCheck* deferred_stack_check = | 
| 4532 new DeferredStackCheck(this, instr); | 4498 new DeferredStackCheck(this, instr); | 
| 4533 ExternalReference stack_limit = | 4499 ExternalReference stack_limit = | 
| 4534 ExternalReference::address_of_stack_limit(isolate()); | 4500 ExternalReference::address_of_stack_limit(isolate()); | 
| 4535 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4501 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 
| 4536 __ j(below, deferred_stack_check->entry()); | 4502 __ j(below, deferred_stack_check->entry()); | 
| 4537 __ bind(instr->done_label()); | 4503 __ bind(instr->done_label()); | 
| 4538 deferred_stack_check->SetExit(instr->done_label()); | 4504 deferred_stack_check->SetExit(instr->done_label()); | 
| 4505 RegisterEnvironmentForDeoptimization(env, true); // Lazy deoptimization. | |
| 4506 // Don't record a deoptimization index for the safepoint here. | |
| 4507 // This will be done explicitly when emitting call and the safepoint in | |
| 4508 // the deferred code. | |
| 4539 } | 4509 } | 
| 4540 } | 4510 } | 
| 4541 | 4511 | 
| 4542 | 4512 | 
| 4543 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4513 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 
| 4544 // This is a pseudo-instruction that ensures that the environment here is | 4514 // This is a pseudo-instruction that ensures that the environment here is | 
| 4545 // properly registered for deoptimization and records the assembler's PC | 4515 // properly registered for deoptimization and records the assembler's PC | 
| 4546 // offset. | 4516 // offset. | 
| 4547 LEnvironment* environment = instr->environment(); | 4517 LEnvironment* environment = instr->environment(); | 
| 4548 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4518 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 
| 4549 instr->SpilledDoubleRegisterArray()); | 4519 instr->SpilledDoubleRegisterArray()); | 
| 4550 | 4520 | 
| 4551 // If the environment were already registered, we would have no way of | 4521 // If the environment were already registered, we would have no way of | 
| 4552 // backpatching it with the spill slot operands. | 4522 // backpatching it with the spill slot operands. | 
| 4553 ASSERT(!environment->HasBeenRegistered()); | 4523 ASSERT(!environment->HasBeenRegistered()); | 
| 4554 RegisterEnvironmentForDeoptimization(environment); | 4524 RegisterEnvironmentForDeoptimization(environment, false); // Not lazy. | 
| 4555 ASSERT(osr_pc_offset_ == -1); | 4525 ASSERT(osr_pc_offset_ == -1); | 
| 4556 osr_pc_offset_ = masm()->pc_offset(); | 4526 osr_pc_offset_ = masm()->pc_offset(); | 
| 4557 } | 4527 } | 
| 4558 | 4528 | 
| 4559 | 4529 | 
| 4560 void LCodeGen::DoIn(LIn* instr) { | 4530 void LCodeGen::DoIn(LIn* instr) { | 
| 4561 LOperand* obj = instr->object(); | 4531 LOperand* obj = instr->object(); | 
| 4562 LOperand* key = instr->key(); | 4532 LOperand* key = instr->key(); | 
| 4563 if (key->IsConstantOperand()) { | 4533 if (key->IsConstantOperand()) { | 
| 4564 __ push(ToImmediate(key)); | 4534 __ push(ToImmediate(key)); | 
| 4565 } else { | 4535 } else { | 
| 4566 __ push(ToOperand(key)); | 4536 __ push(ToOperand(key)); | 
| 4567 } | 4537 } | 
| 4568 if (obj->IsConstantOperand()) { | 4538 if (obj->IsConstantOperand()) { | 
| 4569 __ push(ToImmediate(obj)); | 4539 __ push(ToImmediate(obj)); | 
| 4570 } else { | 4540 } else { | 
| 4571 __ push(ToOperand(obj)); | 4541 __ push(ToOperand(obj)); | 
| 4572 } | 4542 } | 
| 4573 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4543 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 
| 4574 LPointerMap* pointers = instr->pointer_map(); | 4544 LPointerMap* pointers = instr->pointer_map(); | 
| 4575 LEnvironment* env = instr->deoptimization_environment(); | |
| 4576 RecordPosition(pointers->position()); | 4545 RecordPosition(pointers->position()); | 
| 4577 RegisterEnvironmentForDeoptimization(env); | |
| 4578 // Create safepoint generator that will also ensure enough space in the | 4546 // Create safepoint generator that will also ensure enough space in the | 
| 4579 // reloc info for patching in deoptimization (since this is invoking a | 4547 // reloc info for patching in deoptimization (since this is invoking a | 
| 4580 // builtin) | 4548 // builtin) | 
| 4581 SafepointGenerator safepoint_generator(this, | 4549 SafepointGenerator safepoint_generator( | 
| 4582 pointers, | 4550 this, pointers, Safepoint::kLazyDeopt); | 
| 4583 env->deoptimization_index()); | |
| 4584 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4551 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 
| 4585 } | 4552 } | 
| 4586 | 4553 | 
| 4587 | 4554 | 
| 4588 #undef __ | 4555 #undef __ | 
| 4589 | 4556 | 
| 4590 } } // namespace v8::internal | 4557 } } // namespace v8::internal | 
| 4591 | 4558 | 
| 4592 #endif // V8_TARGET_ARCH_IA32 | 4559 #endif // V8_TARGET_ARCH_IA32 | 
| OLD | NEW |