# Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 25 matching lines...) Expand all Loading... | |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 // When invoking builtins, we need to record the safepoint in the middle of | 40 // When invoking builtins, we need to record the safepoint in the middle of |
| 41 // the invoke instruction sequence generated by the macro assembler. | 41 // the invoke instruction sequence generated by the macro assembler. |
| 42 class SafepointGenerator : public CallWrapper { | 42 class SafepointGenerator : public CallWrapper { |
| 43 public: | 43 public: |
| 44 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 45 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 46 int deoptimization_index) | 46 Safepoint::DeoptMode mode) |
| 47 : codegen_(codegen), | 47 : codegen_(codegen), |
| 48 pointers_(pointers), | 48 pointers_(pointers), |
| 49 deoptimization_index_(deoptimization_index) { } | 49 deopt_mode_(mode) { } |
| 50 virtual ~SafepointGenerator() { } | 50 virtual ~SafepointGenerator() { } |
| 51 | 51 |
| 52 virtual void BeforeCall(int call_size) const { | 52 virtual void BeforeCall(int call_size) const { } |
| 53 ASSERT(call_size >= 0); | |
| 54 // Ensure that we have enough space after the previous safepoint position | |
| 55 // for the jump generated there. | |
| 56 int call_end = codegen_->masm()->pc_offset() + call_size; | |
| 57 int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize; | |
| 58 if (call_end < prev_jump_end) { | |
| 59 int padding_size = prev_jump_end - call_end; | |
| 60 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough. | |
| 61 codegen_->masm()->nop(padding_size); | |
| 62 } | |
| 63 } | |
| 64 | 53 |
| 65 virtual void AfterCall() const { | 54 virtual void AfterCall() const { |
| 66 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 55 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
| 67 } | 56 } |
| 68 | 57 |
| 69 private: | 58 private: |
| 70 static const int kMinSafepointSize = | |
| 71 MacroAssembler::kShortCallInstructionLength; | |
| 72 LCodeGen* codegen_; | 59 LCodeGen* codegen_; |
| 73 LPointerMap* pointers_; | 60 LPointerMap* pointers_; |
| 74 int deoptimization_index_; | 61 Safepoint::DeoptMode deopt_mode_; |
| 75 }; | 62 }; |
| 76 | 63 |
| 77 | 64 |
| 78 #define __ masm()-> | 65 #define __ masm()-> |
| 79 | 66 |
| 80 bool LCodeGen::GenerateCode() { | 67 bool LCodeGen::GenerateCode() { |
| 81 HPhase phase("Code generation", chunk()); | 68 HPhase phase("Code generation", chunk()); |
| 82 ASSERT(is_unused()); | 69 ASSERT(is_unused()); |
| 83 status_ = GENERATING; | 70 status_ = GENERATING; |
| 84 | 71 |
| 85 // Open a frame scope to indicate that there is a frame on the stack. The | 72 // Open a frame scope to indicate that there is a frame on the stack. The |
| 86 // MANUAL indicates that the scope shouldn't actually generate code to set up | 73 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 87 // the frame (that is done in GeneratePrologue). | 74 // the frame (that is done in GeneratePrologue). |
| 88 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 75 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 89 | 76 |
| 90 return GeneratePrologue() && | 77 return GeneratePrologue() && |
| 91 GenerateBody() && | 78 GenerateBody() && |
| 92 GenerateDeferredCode() && | 79 GenerateDeferredCode() && |
| 93 GenerateJumpTable() && | 80 GenerateJumpTable() && |
| 94 GenerateSafepointTable(); | 81 GenerateSafepointTable(); |
| 95 } | 82 } |
| 96 | 83 |
| 97 | 84 |
| 98 void LCodeGen::FinishCode(Handle<Code> code) { | 85 void LCodeGen::FinishCode(Handle<Code> code) { |
| 99 ASSERT(is_done()); | 86 ASSERT(is_done()); |
| 100 code->set_stack_slots(GetStackSlotCount()); | 87 code->set_stack_slots(GetStackSlotCount()); |
| 101 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 102 PopulateDeoptimizationData(code); | 89 PopulateDeoptimizationData(code); |
| 103 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code); | |
| 104 } | 90 } |
| 105 | 91 |
| 106 | 92 |
| 107 void LCodeGen::Abort(const char* format, ...) { | 93 void LCodeGen::Abort(const char* format, ...) { |
| 108 if (FLAG_trace_bailout) { | 94 if (FLAG_trace_bailout) { |
| 109 SmartArrayPointer<char> name( | 95 SmartArrayPointer<char> name( |
| 110 info()->shared_info()->DebugName()->ToCString()); | 96 info()->shared_info()->DebugName()->ToCString()); |
| 111 PrintF("Aborting LCodeGen in @\"%s\": ", *name); | 97 PrintF("Aborting LCodeGen in @\"%s\": ", *name); |
| 112 va_list arguments; | 98 va_list arguments; |
| 113 va_start(arguments, format); | 99 va_start(arguments, format); |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 199 if (heap_slots > 0) { | 185 if (heap_slots > 0) { |
| 200 Comment(";;; Allocate local context"); | 186 Comment(";;; Allocate local context"); |
| 201 // Argument to NewContext is the function, which is still in rdi. | 187 // Argument to NewContext is the function, which is still in rdi. |
| 202 __ push(rdi); | 188 __ push(rdi); |
| 203 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 189 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 204 FastNewContextStub stub(heap_slots); | 190 FastNewContextStub stub(heap_slots); |
| 205 __ CallStub(&stub); | 191 __ CallStub(&stub); |
| 206 } else { | 192 } else { |
| 207 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 193 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
| 208 } | 194 } |
| 209 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); | 195 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 210 // Context is returned in both rax and rsi. It replaces the context | 196 // Context is returned in both rax and rsi. It replaces the context |
| 211 // passed to us. It's saved in the stack and kept live in rsi. | 197 // passed to us. It's saved in the stack and kept live in rsi. |
| 212 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 198 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
| 213 | 199 |
| 214 // Copy any necessary parameters into the context. | 200 // Copy any necessary parameters into the context. |
| 215 int num_parameters = scope()->num_parameters(); | 201 int num_parameters = scope()->num_parameters(); |
| 216 for (int i = 0; i < num_parameters; i++) { | 202 for (int i = 0; i < num_parameters; i++) { |
| 217 Variable* var = scope()->parameter(i); | 203 Variable* var = scope()->parameter(i); |
| 218 if (var->IsContextSlot()) { | 204 if (var->IsContextSlot()) { |
| 219 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 252 | 238 |
| 253 if (emit_instructions) { | 239 if (emit_instructions) { |
| 254 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 240 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
| 255 instr->CompileToNative(this); | 241 instr->CompileToNative(this); |
| 256 } | 242 } |
| 257 } | 243 } |
| 258 return !is_aborted(); | 244 return !is_aborted(); |
| 259 } | 245 } |
| 260 | 246 |
| 261 | 247 |
| 262 LInstruction* LCodeGen::GetNextInstruction() { | |
| 263 if (current_instruction_ < instructions_->length() - 1) { | |
| 264 return instructions_->at(current_instruction_ + 1); | |
| 265 } else { | |
| 266 return NULL; | |
| 267 } | |
| 268 } | |
| 269 | |
| 270 | |
| 271 bool LCodeGen::GenerateJumpTable() { | 248 bool LCodeGen::GenerateJumpTable() { |
| 272 for (int i = 0; i < jump_table_.length(); i++) { | 249 for (int i = 0; i < jump_table_.length(); i++) { |
| 273 __ bind(&jump_table_[i].label); | 250 __ bind(&jump_table_[i].label); |
| 274 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); | 251 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); |
| 275 } | 252 } |
| 276 return !is_aborted(); | 253 return !is_aborted(); |
| 277 } | 254 } |
| 278 | 255 |
| 279 | 256 |
| 280 bool LCodeGen::GenerateDeferredCode() { | 257 bool LCodeGen::GenerateDeferredCode() { |
| 281 ASSERT(is_generating()); | 258 ASSERT(is_generating()); |
| 282 if (deferred_.length() > 0) { | 259 if (deferred_.length() > 0) { |
| 283 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 260 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 284 LDeferredCode* code = deferred_[i]; | 261 LDeferredCode* code = deferred_[i]; |
| 285 __ bind(code->entry()); | 262 __ bind(code->entry()); |
| 286 Comment(";;; Deferred code @%d: %s.", | 263 Comment(";;; Deferred code @%d: %s.", |
| 287 code->instruction_index(), | 264 code->instruction_index(), |
| 288 code->instr()->Mnemonic()); | 265 code->instr()->Mnemonic()); |
| 289 code->Generate(); | 266 code->Generate(); |
| 290 __ jmp(code->exit()); | 267 __ jmp(code->exit()); |
| 291 } | 268 } |
| 292 | |
| 293 // Pad code to ensure that the last piece of deferred code have | |
| 294 // room for lazy bailout. | |
| 295 while ((masm()->pc_offset() - LastSafepointEnd()) | |
| 296 < Deoptimizer::patch_size()) { | |
| 297 int padding = masm()->pc_offset() - LastSafepointEnd(); | |
| 298 if (padding > 9) { | |
| 299 __ nop(9); | |
| 300 } else { | |
| 301 __ nop(padding); | |
| 302 } | |
| 303 } | |
| 304 } | 269 } |
| 305 | 270 |
| 306 // Deferred code is the last part of the instruction sequence. Mark | 271 // Deferred code is the last part of the instruction sequence. Mark |
| 307 // the generated code as done unless we bailed out. | 272 // the generated code as done unless we bailed out. |
| 308 if (!is_aborted()) status_ = DONE; | 273 if (!is_aborted()) status_ = DONE; |
| 309 return !is_aborted(); | 274 return !is_aborted(); |
| 310 } | 275 } |
| 311 | 276 |
| 312 | 277 |
| 313 bool LCodeGen::GenerateSafepointTable() { | 278 bool LCodeGen::GenerateSafepointTable() { |
| 314 ASSERT(is_done()); | 279 ASSERT(is_done()); |
| 315 // Ensure that there is space at the end of the code to write a number | |
| 316 // of jump instructions, as well as to afford writing a call near the end | |
| 317 // of the code. | |
| 318 // The jumps are used when there isn't room in the code stream to write | |
| 319 // a long call instruction. Instead it writes a shorter call to a | |
| 320 // jump instruction in the same code object. | |
| 321 // The calls are used when lazy deoptimizing a function and calls to a | |
| 322 // deoptimization function. | |
| 323 int short_deopts = safepoints_.CountShortDeoptimizationIntervals( | |
| 324 static_cast<unsigned>(MacroAssembler::kJumpInstructionLength)); | |
| 325 int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength; | |
| 326 while (byte_count-- > 0) { | |
| 327 __ int3(); | |
| 328 } | |
| 329 safepoints_.Emit(masm(), GetStackSlotCount()); | 280 safepoints_.Emit(masm(), GetStackSlotCount()); |
| 330 return !is_aborted(); | 281 return !is_aborted(); |
| 331 } | 282 } |
| 332 | 283 |
| 333 | 284 |
| 334 Register LCodeGen::ToRegister(int index) const { | 285 Register LCodeGen::ToRegister(int index) const { |
| 335 return Register::FromAllocationIndex(index); | 286 return Register::FromAllocationIndex(index); |
| 336 } | 287 } |
| 337 | 288 |
| 338 | 289 |
| (...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 484 | 435 |
| 485 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 436 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 486 RelocInfo::Mode mode, | 437 RelocInfo::Mode mode, |
| 487 LInstruction* instr, | 438 LInstruction* instr, |
| 488 SafepointMode safepoint_mode, | 439 SafepointMode safepoint_mode, |
| 489 int argc) { | 440 int argc) { |
| 490 ASSERT(instr != NULL); | 441 ASSERT(instr != NULL); |
| 491 LPointerMap* pointers = instr->pointer_map(); | 442 LPointerMap* pointers = instr->pointer_map(); |
| 492 RecordPosition(pointers->position()); | 443 RecordPosition(pointers->position()); |
| 493 __ call(code, mode); | 444 __ call(code, mode); |
| 494 RegisterLazyDeoptimization(instr, safepoint_mode, argc); | 445 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); |
| 495 | 446 |
| 496 // Signal that we don't inline smi code before these stubs in the | 447 // Signal that we don't inline smi code before these stubs in the |
| 497 // optimizing code generator. | 448 // optimizing code generator. |
| 498 if (code->kind() == Code::BINARY_OP_IC || | 449 if (code->kind() == Code::BINARY_OP_IC || |
| 499 code->kind() == Code::COMPARE_IC) { | 450 code->kind() == Code::COMPARE_IC) { |
| 500 __ nop(); | 451 __ nop(); |
| 501 } | 452 } |
| 502 } | 453 } |
| 503 | 454 |
| 504 | 455 |
| 505 void LCodeGen::CallCode(Handle<Code> code, | 456 void LCodeGen::CallCode(Handle<Code> code, |
| 506 RelocInfo::Mode mode, | 457 RelocInfo::Mode mode, |
| 507 LInstruction* instr) { | 458 LInstruction* instr) { |
| 508 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0); | 459 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 509 } | 460 } |
| 510 | 461 |
| 511 | 462 |
| 512 void LCodeGen::CallRuntime(const Runtime::Function* function, | 463 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 513 int num_arguments, | 464 int num_arguments, |
| 514 LInstruction* instr) { | 465 LInstruction* instr) { |
| 515 ASSERT(instr != NULL); | 466 ASSERT(instr != NULL); |
| 516 ASSERT(instr->HasPointerMap()); | 467 ASSERT(instr->HasPointerMap()); |
| 517 LPointerMap* pointers = instr->pointer_map(); | 468 LPointerMap* pointers = instr->pointer_map(); |
| 518 RecordPosition(pointers->position()); | 469 RecordPosition(pointers->position()); |
| 519 | 470 |
| 520 __ CallRuntime(function, num_arguments); | 471 __ CallRuntime(function, num_arguments); |
| 521 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 472 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 522 } | 473 } |
| 523 | 474 |
| 524 | 475 |
| 525 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 476 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 526 int argc, | 477 int argc, |
| 527 LInstruction* instr) { | 478 LInstruction* instr) { |
| 528 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 479 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 529 __ CallRuntimeSaveDoubles(id); | 480 __ CallRuntimeSaveDoubles(id); |
| 530 RecordSafepointWithRegisters( | 481 RecordSafepointWithRegisters( |
| 531 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | 482 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
| 532 } | 483 } |
| 533 | 484 |
| 534 | 485 |
| 535 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | 486 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
| 536 SafepointMode safepoint_mode, | 487 Safepoint::DeoptMode mode) { |
| 537 int argc) { | |
| 538 // Create the environment to bailout to. If the call has side effects | |
| 539 // execution has to continue after the call otherwise execution can continue | |
| 540 // from a previous bailout point repeating the call. | |
| 541 LEnvironment* deoptimization_environment; | |
| 542 if (instr->HasDeoptimizationEnvironment()) { | |
| 543 deoptimization_environment = instr->deoptimization_environment(); | |
| 544 } else { | |
| 545 deoptimization_environment = instr->environment(); | |
| 546 } | |
| 547 | |
| 548 RegisterEnvironmentForDeoptimization(deoptimization_environment); | |
| 549 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
| 550 ASSERT(argc == 0); | |
| 551 RecordSafepoint(instr->pointer_map(), | |
| 552 deoptimization_environment->deoptimization_index()); | |
| 553 } else { | |
| 554 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS); | |
| 555 RecordSafepointWithRegisters( | |
| 556 instr->pointer_map(), | |
| 557 argc, | |
| 558 deoptimization_environment->deoptimization_index()); | |
| 559 } | |
| 560 } | |
| 561 | |
| 562 | |
| 563 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | |
| 564 if (!environment->HasBeenRegistered()) { | 488 if (!environment->HasBeenRegistered()) { |
| 565 // Physical stack frame layout: | 489 // Physical stack frame layout: |
| 566 // -x ............. -4 0 ..................................... y | 490 // -x ............. -4 0 ..................................... y |
| 567 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 491 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
| 568 | 492 |
| 569 // Layout of the environment: | 493 // Layout of the environment: |
| 570 // 0 ..................................................... size-1 | 494 // 0 ..................................................... size-1 |
| 571 // [parameters] [locals] [expression stack including arguments] | 495 // [parameters] [locals] [expression stack including arguments] |
| 572 | 496 |
| 573 // Layout of the translation: | 497 // Layout of the translation: |
| 574 // 0 ........................................................ size - 1 + 4 | 498 // 0 ........................................................ size - 1 + 4 |
| 575 // [expression stack including arguments] [locals] [4 words] [parameters] | 499 // [expression stack including arguments] [locals] [4 words] [parameters] |
| 576 // |>------------ translation_size ------------<| | 500 // |>------------ translation_size ------------<| |
| 577 | 501 |
| 578 int frame_count = 0; | 502 int frame_count = 0; |
| 579 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 503 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
| 580 ++frame_count; | 504 ++frame_count; |
| 581 } | 505 } |
| 582 Translation translation(&translations_, frame_count); | 506 Translation translation(&translations_, frame_count); |
| 583 WriteTranslation(environment, &translation); | 507 WriteTranslation(environment, &translation); |
| 584 int deoptimization_index = deoptimizations_.length(); | 508 int deoptimization_index = deoptimizations_.length(); |
| 585 environment->Register(deoptimization_index, translation.index()); | 509 int pc_offset = masm()->pc_offset(); |
| 510 environment->Register(deoptimization_index, | |
| 511 translation.index(), | |
| 512 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | |
| 586 deoptimizations_.Add(environment); | 513 deoptimizations_.Add(environment); |
| 587 } | 514 } |
| 588 } | 515 } |
| 589 | 516 |
| 590 | 517 |
| 591 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 518 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
| 592 RegisterEnvironmentForDeoptimization(environment); | 519 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 593 ASSERT(environment->HasBeenRegistered()); | 520 ASSERT(environment->HasBeenRegistered()); |
| 594 int id = environment->deoptimization_index(); | 521 int id = environment->deoptimization_index(); |
| 595 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); | 522 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); |
| 596 ASSERT(entry != NULL); | 523 ASSERT(entry != NULL); |
| 597 if (entry == NULL) { | 524 if (entry == NULL) { |
| 598 Abort("bailout was not prepared"); | 525 Abort("bailout was not prepared"); |
| 599 return; | 526 return; |
| 600 } | 527 } |
| 601 | 528 |
| 602 if (cc == no_condition) { | 529 if (cc == no_condition) { |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 634 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 561 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
| 635 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 562 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); |
| 636 | 563 |
| 637 // Populate the deoptimization entries. | 564 // Populate the deoptimization entries. |
| 638 for (int i = 0; i < length; i++) { | 565 for (int i = 0; i < length; i++) { |
| 639 LEnvironment* env = deoptimizations_[i]; | 566 LEnvironment* env = deoptimizations_[i]; |
| 640 data->SetAstId(i, Smi::FromInt(env->ast_id())); | 567 data->SetAstId(i, Smi::FromInt(env->ast_id())); |
| 641 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); | 568 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); |
| 642 data->SetArgumentsStackHeight(i, | 569 data->SetArgumentsStackHeight(i, |
| 643 Smi::FromInt(env->arguments_stack_height())); | 570 Smi::FromInt(env->arguments_stack_height())); |
| 571 data->SetPc(i, Smi::FromInt(env->pc_offset())); | |
| 644 } | 572 } |
| 645 code->set_deoptimization_data(*data); | 573 code->set_deoptimization_data(*data); |
| 646 } | 574 } |
| 647 | 575 |
| 648 | 576 |
| 649 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { | 577 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { |
| 650 int result = deoptimization_literals_.length(); | 578 int result = deoptimization_literals_.length(); |
| 651 for (int i = 0; i < deoptimization_literals_.length(); ++i) { | 579 for (int i = 0; i < deoptimization_literals_.length(); ++i) { |
| 652 if (deoptimization_literals_[i].is_identical_to(literal)) return i; | 580 if (deoptimization_literals_[i].is_identical_to(literal)) return i; |
| 653 } | 581 } |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 665 for (int i = 0, length = inlined_closures->length(); | 593 for (int i = 0, length = inlined_closures->length(); |
| 666 i < length; | 594 i < length; |
| 667 i++) { | 595 i++) { |
| 668 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 596 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
| 669 } | 597 } |
| 670 | 598 |
| 671 inlined_function_count_ = deoptimization_literals_.length(); | 599 inlined_function_count_ = deoptimization_literals_.length(); |
| 672 } | 600 } |
| 673 | 601 |
| 674 | 602 |
| 603 void LCodeGen::RecordSafepointWithLazyDeopt( | |
| 604 LInstruction* instr, SafepointMode safepoint_mode, int argc) { | |
| 605 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | |
| 606 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt); | |
| 607 } else { | |
| 608 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS); | |
| 609 RecordSafepointWithRegisters( | |
| 610 instr->pointer_map(), argc, Safepoint::kLazyDeopt); | |
| 611 } | |
| 612 } | |
| 613 | |
| 614 | |
| 675 void LCodeGen::RecordSafepoint( | 615 void LCodeGen::RecordSafepoint( |
| 676 LPointerMap* pointers, | 616 LPointerMap* pointers, |
| 677 Safepoint::Kind kind, | 617 Safepoint::Kind kind, |
| 678 int arguments, | 618 int arguments, |
| 679 int deoptimization_index) { | 619 Safepoint::DeoptMode deopt_mode) { |
| 680 ASSERT(kind == expected_safepoint_kind_); | 620 ASSERT(kind == expected_safepoint_kind_); |
| 681 | 621 |
| 682 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); | 622 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); |
| 683 | 623 |
| 684 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 624 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 685 kind, arguments, deoptimization_index); | 625 kind, arguments, deopt_mode); |
| 686 for (int i = 0; i < operands->length(); i++) { | 626 for (int i = 0; i < operands->length(); i++) { |
| 687 LOperand* pointer = operands->at(i); | 627 LOperand* pointer = operands->at(i); |
| 688 if (pointer->IsStackSlot()) { | 628 if (pointer->IsStackSlot()) { |
| 689 safepoint.DefinePointerSlot(pointer->index()); | 629 safepoint.DefinePointerSlot(pointer->index()); |
| 690 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 630 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 691 safepoint.DefinePointerRegister(ToRegister(pointer)); | 631 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 692 } | 632 } |
| 693 } | 633 } |
| 694 if (kind & Safepoint::kWithRegisters) { | 634 if (kind & Safepoint::kWithRegisters) { |
| 695 // Register rsi always contains a pointer to the context. | 635 // Register rsi always contains a pointer to the context. |
| 696 safepoint.DefinePointerRegister(rsi); | 636 safepoint.DefinePointerRegister(rsi); |
| 697 } | 637 } |
| 698 } | 638 } |
| 699 | 639 |
| 700 | 640 |
| 701 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 641 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 702 int deoptimization_index) { | 642 Safepoint::DeoptMode deopt_mode) { |
| 703 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); | 643 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
| 704 } | 644 } |
| 705 | 645 |
| 706 | 646 |
| 707 void LCodeGen::RecordSafepoint(int deoptimization_index) { | 647 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
| 708 LPointerMap empty_pointers(RelocInfo::kNoPosition); | 648 LPointerMap empty_pointers(RelocInfo::kNoPosition); |
| 709 RecordSafepoint(&empty_pointers, deoptimization_index); | 649 RecordSafepoint(&empty_pointers, deopt_mode); |
| 710 } | 650 } |
| 711 | 651 |
| 712 | 652 |
| 713 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 653 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
| 714 int arguments, | 654 int arguments, |
| 715 int deoptimization_index) { | 655 Safepoint::DeoptMode deopt_mode) { |
| 716 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, | 656 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode); |
| 717 deoptimization_index); | |
| 718 } | 657 } |
| 719 | 658 |
| 720 | 659 |
| 721 void LCodeGen::RecordPosition(int position) { | 660 void LCodeGen::RecordPosition(int position) { |
| 722 if (position == RelocInfo::kNoPosition) return; | 661 if (position == RelocInfo::kNoPosition) return; |
| 723 masm()->positions_recorder()->RecordPosition(position); | 662 masm()->positions_recorder()->RecordPosition(position); |
| 724 } | 663 } |
| 725 | 664 |
| 726 | 665 |
| 727 void LCodeGen::DoLabel(LLabel* label) { | 666 void LCodeGen::DoLabel(LLabel* label) { |
| (...skipping 14 matching lines...) Expand all Loading... | |
| 742 | 681 |
| 743 | 682 |
| 744 void LCodeGen::DoGap(LGap* gap) { | 683 void LCodeGen::DoGap(LGap* gap) { |
| 745 for (int i = LGap::FIRST_INNER_POSITION; | 684 for (int i = LGap::FIRST_INNER_POSITION; |
| 746 i <= LGap::LAST_INNER_POSITION; | 685 i <= LGap::LAST_INNER_POSITION; |
| 747 i++) { | 686 i++) { |
| 748 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 687 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
| 749 LParallelMove* move = gap->GetParallelMove(inner_pos); | 688 LParallelMove* move = gap->GetParallelMove(inner_pos); |
| 750 if (move != NULL) DoParallelMove(move); | 689 if (move != NULL) DoParallelMove(move); |
| 751 } | 690 } |
| 752 | |
| 753 LInstruction* next = GetNextInstruction(); | |
| 754 if (next != NULL && next->IsLazyBailout()) { | |
| 755 int pc = masm()->pc_offset(); | |
| 756 safepoints_.SetPcAfterGap(pc); | |
| 757 } | |
| 758 } | 691 } |
| 759 | 692 |
| 760 | 693 |
| 761 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 694 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
| 762 DoGap(instr); | 695 DoGap(instr); |
| 763 } | 696 } |
| 764 | 697 |
| 765 | 698 |
| 766 void LCodeGen::DoParameter(LParameter* instr) { | 699 void LCodeGen::DoParameter(LParameter* instr) { |
| 767 // Nothing to do. | 700 // Nothing to do. |
| (...skipping 1114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1882 } | 1815 } |
| 1883 | 1816 |
| 1884 | 1817 |
| 1885 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1818 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 1886 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 1819 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
| 1887 public: | 1820 public: |
| 1888 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 1821 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
| 1889 LInstanceOfKnownGlobal* instr) | 1822 LInstanceOfKnownGlobal* instr) |
| 1890 : LDeferredCode(codegen), instr_(instr) { } | 1823 : LDeferredCode(codegen), instr_(instr) { } |
| 1891 virtual void Generate() { | 1824 virtual void Generate() { |
| 1892 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); | 1825 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
| 1893 } | 1826 } |
| 1894 virtual LInstruction* instr() { return instr_; } | 1827 virtual LInstruction* instr() { return instr_; } |
| 1895 Label* map_check() { return &map_check_; } | 1828 Label* map_check() { return &map_check_; } |
| 1896 private: | 1829 private: |
| 1897 LInstanceOfKnownGlobal* instr_; | 1830 LInstanceOfKnownGlobal* instr_; |
| 1898 Label map_check_; | 1831 Label map_check_; |
| 1899 }; | 1832 }; |
| 1900 | 1833 |
| 1901 | 1834 |
| 1902 DeferredInstanceOfKnownGlobal* deferred; | 1835 DeferredInstanceOfKnownGlobal* deferred; |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1940 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); | 1873 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); |
| 1941 | 1874 |
| 1942 __ bind(&false_result); | 1875 __ bind(&false_result); |
| 1943 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 1876 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
| 1944 | 1877 |
| 1945 __ bind(deferred->exit()); | 1878 __ bind(deferred->exit()); |
| 1946 __ bind(&done); | 1879 __ bind(&done); |
| 1947 } | 1880 } |
| 1948 | 1881 |
| 1949 | 1882 |
| 1950 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 1883 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 1951 Label* map_check) { | 1884 Label* map_check) { |
| 1952 { | 1885 { |
| 1953 PushSafepointRegistersScope scope(this); | 1886 PushSafepointRegistersScope scope(this); |
| 1954 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( | 1887 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( |
| 1955 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); | 1888 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); |
| 1956 InstanceofStub stub(flags); | 1889 InstanceofStub stub(flags); |
| 1957 | 1890 |
| 1958 __ push(ToRegister(instr->InputAt(0))); | 1891 __ push(ToRegister(instr->InputAt(0))); |
| 1959 __ Push(instr->function()); | 1892 __ Push(instr->function()); |
| 1960 | 1893 |
| 1961 static const int kAdditionalDelta = 10; | 1894 static const int kAdditionalDelta = 10; |
| 1962 int delta = | 1895 int delta = |
| 1963 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1896 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 1964 ASSERT(delta >= 0); | 1897 ASSERT(delta >= 0); |
| 1965 __ push_imm32(delta); | 1898 __ push_imm32(delta); |
| 1966 | 1899 |
| 1967 // We are pushing three values on the stack but recording a | 1900 // We are pushing three values on the stack but recording a |
| 1968 // safepoint with two arguments because stub is going to | 1901 // safepoint with two arguments because stub is going to |
| 1969 // remove the third argument from the stack before jumping | 1902 // remove the third argument from the stack before jumping |
| 1970 // to instanceof builtin on the slow path. | 1903 // to instanceof builtin on the slow path. |
| 1971 CallCodeGeneric(stub.GetCode(), | 1904 CallCodeGeneric(stub.GetCode(), |
| 1972 RelocInfo::CODE_TARGET, | 1905 RelocInfo::CODE_TARGET, |
| 1973 instr, | 1906 instr, |
| 1974 RECORD_SAFEPOINT_WITH_REGISTERS, | 1907 RECORD_SAFEPOINT_WITH_REGISTERS, |
| 1975 2); | 1908 2); |
| 1976 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); | 1909 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); |
| 1910 ASSERT(instr->HasDeoptimizationEnvironment()); | |
| 1911 LEnvironment* env = instr->deoptimization_environment(); | |
| 1912 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 1977 // Move result to a register that survives the end of the | 1913 // Move result to a register that survives the end of the |
| 1978 // PushSafepointRegisterScope. | 1914 // PushSafepointRegisterScope. |
| 1979 __ movq(kScratchRegister, rax); | 1915 __ movq(kScratchRegister, rax); |
| 1980 } | 1916 } |
| 1981 __ testq(kScratchRegister, kScratchRegister); | 1917 __ testq(kScratchRegister, kScratchRegister); |
| 1982 Label load_false; | 1918 Label load_false; |
| 1983 Label done; | 1919 Label done; |
| 1984 __ j(not_zero, &load_false); | 1920 __ j(not_zero, &load_false); |
| 1985 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 1921 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
| 1986 __ jmp(&done); | 1922 __ jmp(&done); |
| (...skipping 575 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2562 __ j(zero, &invoke, Label::kNear); | 2498 __ j(zero, &invoke, Label::kNear); |
| 2563 __ bind(&loop); | 2499 __ bind(&loop); |
| 2564 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2500 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
| 2565 __ decl(length); | 2501 __ decl(length); |
| 2566 __ j(not_zero, &loop); | 2502 __ j(not_zero, &loop); |
| 2567 | 2503 |
| 2568 // Invoke the function. | 2504 // Invoke the function. |
| 2569 __ bind(&invoke); | 2505 __ bind(&invoke); |
| 2570 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2506 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 2571 LPointerMap* pointers = instr->pointer_map(); | 2507 LPointerMap* pointers = instr->pointer_map(); |
| 2572 LEnvironment* env = instr->deoptimization_environment(); | |
| 2573 RecordPosition(pointers->position()); | 2508 RecordPosition(pointers->position()); |
| 2574 RegisterEnvironmentForDeoptimization(env); | 2509 SafepointGenerator safepoint_generator( |
| 2575 SafepointGenerator safepoint_generator(this, | 2510 this, pointers, Safepoint::kLazyDeopt); |
| 2576 pointers, | |
| 2577 env->deoptimization_index()); | |
| 2578 v8::internal::ParameterCount actual(rax); | 2511 v8::internal::ParameterCount actual(rax); |
| 2579 __ InvokeFunction(function, actual, CALL_FUNCTION, | 2512 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 2580 safepoint_generator, CALL_AS_METHOD); | 2513 safepoint_generator, CALL_AS_METHOD); |
| 2581 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2514 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2582 } | 2515 } |
| 2583 | 2516 |
| 2584 | 2517 |
| 2585 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2518 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 2586 LOperand* argument = instr->InputAt(0); | 2519 LOperand* argument = instr->InputAt(0); |
| 2587 EmitPushTaggedOperand(argument); | 2520 EmitPushTaggedOperand(argument); |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2645 | 2578 |
| 2646 // Invoke function. | 2579 // Invoke function. |
| 2647 __ SetCallKind(rcx, call_kind); | 2580 __ SetCallKind(rcx, call_kind); |
| 2648 if (*function == *info()->closure()) { | 2581 if (*function == *info()->closure()) { |
| 2649 __ CallSelf(); | 2582 __ CallSelf(); |
| 2650 } else { | 2583 } else { |
| 2651 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 2584 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
| 2652 } | 2585 } |
| 2653 | 2586 |
| 2654 // Setup deoptimization. | 2587 // Setup deoptimization. |
| 2655 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 2588 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 2656 | 2589 |
| 2657 // Restore context. | 2590 // Restore context. |
| 2658 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2591 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2659 } | 2592 } |
| 2660 | 2593 |
| 2661 | 2594 |
| 2662 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2595 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2663 ASSERT(ToRegister(instr->result()).is(rax)); | 2596 ASSERT(ToRegister(instr->result()).is(rax)); |
| 2664 __ Move(rdi, instr->function()); | 2597 __ Move(rdi, instr->function()); |
| 2665 CallKnownFunction(instr->function(), | 2598 CallKnownFunction(instr->function(), |
| (...skipping 327 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2993 UNREACHABLE(); | 2926 UNREACHABLE(); |
| 2994 } | 2927 } |
| 2995 } | 2928 } |
| 2996 | 2929 |
| 2997 | 2930 |
| 2998 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 2931 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 2999 ASSERT(ToRegister(instr->function()).is(rdi)); | 2932 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3000 ASSERT(instr->HasPointerMap()); | 2933 ASSERT(instr->HasPointerMap()); |
| 3001 ASSERT(instr->HasDeoptimizationEnvironment()); | 2934 ASSERT(instr->HasDeoptimizationEnvironment()); |
| 3002 LPointerMap* pointers = instr->pointer_map(); | 2935 LPointerMap* pointers = instr->pointer_map(); |
| 3003 LEnvironment* env = instr->deoptimization_environment(); | |
| 3004 RecordPosition(pointers->position()); | 2936 RecordPosition(pointers->position()); |
| 3005 RegisterEnvironmentForDeoptimization(env); | 2937 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3006 SafepointGenerator generator(this, pointers, env->deoptimization_index()); | |
| 3007 ParameterCount count(instr->arity()); | 2938 ParameterCount count(instr->arity()); |
| 3008 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 2939 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
| 3009 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2940 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3010 } | 2941 } |
| 3011 | 2942 |
| 3012 | 2943 |
| 3013 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 2944 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 3014 ASSERT(ToRegister(instr->key()).is(rcx)); | 2945 ASSERT(ToRegister(instr->key()).is(rcx)); |
| 3015 ASSERT(ToRegister(instr->result()).is(rax)); | 2946 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3016 | 2947 |
| (...skipping 1158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4175 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 4106 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
| 4176 | 4107 |
| 4177 // Check the marker in the calling frame. | 4108 // Check the marker in the calling frame. |
| 4178 __ bind(&check_frame_marker); | 4109 __ bind(&check_frame_marker); |
| 4179 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 4110 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 4180 Smi::FromInt(StackFrame::CONSTRUCT)); | 4111 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 4181 } | 4112 } |
| 4182 | 4113 |
| 4183 | 4114 |
| 4184 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4115 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 4185 // No code for lazy bailout instruction. Used to capture environment after a | 4116 // Ensure that we have enough space after the previous lazy-deopt position |
| 4186 // call for populating the safepoint data with deoptimization data. | 4117 // for the jump generated there. |
| 4118 int current_pc = masm()->pc_offset(); | |
| 4119 int patch_size = Deoptimizer::patch_size(); | |
| 4120 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | |
| 4121 __ nop(last_lazy_deopt_pc_ + patch_size - current_pc); | |
| 4122 } | |
| 4123 last_lazy_deopt_pc_ = current_pc; | |
| 4124 | |
| 4125 ASSERT(instr->HasEnvironment()); | |
| 4126 LEnvironment* env = instr->environment(); | |
| 4127 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
| 4128 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
|
Vyacheslav Egorov (Chromium)
2011/11/15 12:03:56
This code does not pad the last @lazy-bailout in t
fschneider
2011/11/15 13:35:24
Done.
| |
| 4187 } | 4129 } |
| 4188 | 4130 |
| 4189 | 4131 |
| 4190 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4132 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 4191 DeoptimizeIf(no_condition, instr->environment()); | 4133 DeoptimizeIf(no_condition, instr->environment()); |
| 4192 } | 4134 } |
| 4193 | 4135 |
| 4194 | 4136 |
| 4195 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 4137 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { |
| 4196 LOperand* obj = instr->object(); | 4138 LOperand* obj = instr->object(); |
| 4197 LOperand* key = instr->key(); | 4139 LOperand* key = instr->key(); |
| 4198 EmitPushTaggedOperand(obj); | 4140 EmitPushTaggedOperand(obj); |
| 4199 EmitPushTaggedOperand(key); | 4141 EmitPushTaggedOperand(key); |
| 4200 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4142 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 4201 LPointerMap* pointers = instr->pointer_map(); | 4143 LPointerMap* pointers = instr->pointer_map(); |
| 4202 LEnvironment* env = instr->deoptimization_environment(); | |
| 4203 RecordPosition(pointers->position()); | 4144 RecordPosition(pointers->position()); |
| 4204 RegisterEnvironmentForDeoptimization(env); | |
| 4205 // Create safepoint generator that will also ensure enough space in the | 4145 // Create safepoint generator that will also ensure enough space in the |
| 4206 // reloc info for patching in deoptimization (since this is invoking a | 4146 // reloc info for patching in deoptimization (since this is invoking a |
| 4207 // builtin) | 4147 // builtin) |
| 4208 SafepointGenerator safepoint_generator(this, | 4148 SafepointGenerator safepoint_generator( |
| 4209 pointers, | 4149 this, pointers, Safepoint::kLazyDeopt); |
| 4210 env->deoptimization_index()); | |
| 4211 __ Push(Smi::FromInt(strict_mode_flag())); | 4150 __ Push(Smi::FromInt(strict_mode_flag())); |
| 4212 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); | 4151 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); |
| 4213 } | 4152 } |
| 4214 | 4153 |
| 4215 | 4154 |
| 4216 void LCodeGen::DoIn(LIn* instr) { | 4155 void LCodeGen::DoIn(LIn* instr) { |
| 4217 LOperand* obj = instr->object(); | 4156 LOperand* obj = instr->object(); |
| 4218 LOperand* key = instr->key(); | 4157 LOperand* key = instr->key(); |
| 4219 EmitPushTaggedOperand(key); | 4158 EmitPushTaggedOperand(key); |
| 4220 EmitPushTaggedOperand(obj); | 4159 EmitPushTaggedOperand(obj); |
| 4221 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 4160 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 4222 LPointerMap* pointers = instr->pointer_map(); | 4161 LPointerMap* pointers = instr->pointer_map(); |
| 4223 LEnvironment* env = instr->deoptimization_environment(); | |
| 4224 RecordPosition(pointers->position()); | 4162 RecordPosition(pointers->position()); |
| 4225 RegisterEnvironmentForDeoptimization(env); | 4163 SafepointGenerator safepoint_generator( |
| 4226 // Create safepoint generator that will also ensure enough space in the | 4164 this, pointers, Safepoint::kLazyDeopt); |
| 4227 // reloc info for patching in deoptimization (since this is invoking a | |
| 4228 // builtin) | |
| 4229 SafepointGenerator safepoint_generator(this, | |
| 4230 pointers, | |
| 4231 env->deoptimization_index()); | |
| 4232 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4165 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); |
| 4233 } | 4166 } |
| 4234 | 4167 |
| 4235 | 4168 |
| 4236 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 4169 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 4237 { | 4170 PushSafepointRegistersScope scope(this); |
| 4238 PushSafepointRegistersScope scope(this); | 4171 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4239 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4172 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 4240 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 4173 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); |
| 4241 RegisterLazyDeoptimization(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); | 4174 ASSERT(instr->HasEnvironment()); |
| 4242 } | 4175 LEnvironment* env = instr->environment(); |
| 4243 | 4176 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 4244 // The gap code includes the restoring of the safepoint registers. | |
| 4245 int pc = masm()->pc_offset(); | |
| 4246 safepoints_.SetPcAfterGap(pc); | |
| 4247 } | 4177 } |
| 4248 | 4178 |
| 4249 | 4179 |
| 4250 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4180 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 4251 class DeferredStackCheck: public LDeferredCode { | 4181 class DeferredStackCheck: public LDeferredCode { |
| 4252 public: | 4182 public: |
| 4253 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) | 4183 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) |
| 4254 : LDeferredCode(codegen), instr_(instr) { } | 4184 : LDeferredCode(codegen), instr_(instr) { } |
| 4255 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 4185 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
| 4256 virtual LInstruction* instr() { return instr_; } | 4186 virtual LInstruction* instr() { return instr_; } |
| 4257 private: | 4187 private: |
| 4258 LStackCheck* instr_; | 4188 LStackCheck* instr_; |
| 4259 }; | 4189 }; |
| 4260 | 4190 |
| 4191 ASSERT(instr->HasEnvironment()); | |
| 4192 LEnvironment* env = instr->environment(); | |
| 4261 if (instr->hydrogen()->is_function_entry()) { | 4193 if (instr->hydrogen()->is_function_entry()) { |
| 4262 // Perform stack overflow check. | 4194 // Perform stack overflow check. |
| 4263 Label done; | 4195 Label done; |
| 4264 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 4196 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 4265 __ j(above_equal, &done, Label::kNear); | 4197 __ j(above_equal, &done, Label::kNear); |
| 4266 StackCheckStub stub; | 4198 StackCheckStub stub; |
| 4267 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4199 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4268 __ bind(&done); | 4200 __ bind(&done); |
| 4201 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
| 4202 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 4269 } else { | 4203 } else { |
| 4270 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4204 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 4271 // Perform stack overflow check if this goto needs it before jumping. | 4205 // Perform stack overflow check if this goto needs it before jumping. |
| 4272 DeferredStackCheck* deferred_stack_check = | 4206 DeferredStackCheck* deferred_stack_check = |
| 4273 new DeferredStackCheck(this, instr); | 4207 new DeferredStackCheck(this, instr); |
| 4274 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 4208 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 4275 __ j(below, deferred_stack_check->entry()); | 4209 __ j(below, deferred_stack_check->entry()); |
| 4276 __ bind(instr->done_label()); | 4210 __ bind(instr->done_label()); |
| 4277 deferred_stack_check->SetExit(instr->done_label()); | 4211 deferred_stack_check->SetExit(instr->done_label()); |
| 4212 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
| 4213 // Don't record a deoptimization index for the safepoint here. | |
| 4214 // This will be done explicitly when emitting call and the safepoint in | |
| 4215 // the deferred code. | |
| 4278 } | 4216 } |
| 4279 } | 4217 } |
| 4280 | 4218 |
| 4281 | 4219 |
| 4282 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4220 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
| 4283 // This is a pseudo-instruction that ensures that the environment here is | 4221 // This is a pseudo-instruction that ensures that the environment here is |
| 4284 // properly registered for deoptimization and records the assembler's PC | 4222 // properly registered for deoptimization and records the assembler's PC |
| 4285 // offset. | 4223 // offset. |
| 4286 LEnvironment* environment = instr->environment(); | 4224 LEnvironment* environment = instr->environment(); |
| 4287 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4225 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
| 4288 instr->SpilledDoubleRegisterArray()); | 4226 instr->SpilledDoubleRegisterArray()); |
| 4289 | 4227 |
| 4290 // If the environment were already registered, we would have no way of | 4228 // If the environment were already registered, we would have no way of |
| 4291 // backpatching it with the spill slot operands. | 4229 // backpatching it with the spill slot operands. |
| 4292 ASSERT(!environment->HasBeenRegistered()); | 4230 ASSERT(!environment->HasBeenRegistered()); |
| 4293 RegisterEnvironmentForDeoptimization(environment); | 4231 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 4294 ASSERT(osr_pc_offset_ == -1); | 4232 ASSERT(osr_pc_offset_ == -1); |
| 4295 osr_pc_offset_ = masm()->pc_offset(); | 4233 osr_pc_offset_ = masm()->pc_offset(); |
| 4296 } | 4234 } |
| 4297 | 4235 |
| 4298 #undef __ | 4236 #undef __ |
| 4299 | 4237 |
| 4300 } } // namespace v8::internal | 4238 } } // namespace v8::internal |
| 4301 | 4239 |
| 4302 #endif // V8_TARGET_ARCH_X64 | 4240 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |