Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 8492004: Fix lazy deoptimization at HInvokeFunction and enable target-recording call-function stub. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: added nop-padding and assertions on all platforms Created 9 years, 1 month ago
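
Note on the mechanism, as far as this diff shows: lazy deoptimization works by patching a call to a deoptimization entry into the code right after a call's return address, so every lazy-deopt point needs Deoptimizer::patch_size() bytes of instruction stream behind it. This patch set drops the old per-call and per-table space reservations in favor of explicit nop padding (EnsureSpaceForLazyDeopt, near the end of the file) and records each lazy-deopt pc offset directly in the deoptimization data.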
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...)
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 39
40 // When invoking builtins, we need to record the safepoint in the middle of 40 // When invoking builtins, we need to record the safepoint in the middle of
41 // the invoke instruction sequence generated by the macro assembler. 41 // the invoke instruction sequence generated by the macro assembler.
42 class SafepointGenerator : public CallWrapper { 42 class SafepointGenerator : public CallWrapper {
43 public: 43 public:
44 SafepointGenerator(LCodeGen* codegen, 44 SafepointGenerator(LCodeGen* codegen,
45 LPointerMap* pointers, 45 LPointerMap* pointers,
46 int deoptimization_index) 46 Safepoint::DeoptMode mode)
47 : codegen_(codegen), 47 : codegen_(codegen),
48 pointers_(pointers), 48 pointers_(pointers),
49 deoptimization_index_(deoptimization_index) { } 49 deopt_mode_(mode) { }
50 virtual ~SafepointGenerator() { } 50 virtual ~SafepointGenerator() { }
51 51
52 virtual void BeforeCall(int call_size) const { 52 virtual void BeforeCall(int call_size) const { }
53 ASSERT(call_size >= 0);
54 // Ensure that we have enough space after the previous safepoint position
55 // for the jump generated there.
56 int call_end = codegen_->masm()->pc_offset() + call_size;
57 int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize;
58 if (call_end < prev_jump_end) {
59 int padding_size = prev_jump_end - call_end;
60 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough.
61 codegen_->masm()->nop(padding_size);
62 }
63 }
64 53
65 virtual void AfterCall() const { 54 virtual void AfterCall() const {
66 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 55 codegen_->RecordSafepoint(pointers_, deopt_mode_);
67 } 56 }
68 57
69 private: 58 private:
70 static const int kMinSafepointSize =
71 MacroAssembler::kShortCallInstructionLength;
72 LCodeGen* codegen_; 59 LCodeGen* codegen_;
73 LPointerMap* pointers_; 60 LPointerMap* pointers_;
74 int deoptimization_index_; 61 Safepoint::DeoptMode deopt_mode_;
75 }; 62 };
76 63
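
SafepointGenerator plugs into the macro assembler's CallWrapper hooks: the assembler calls BeforeCall just before it emits a call and AfterCall immediately after, so the safepoint is recorded exactly at the call's return address. With this change BeforeCall becomes a no-op; the nop padding it used to emit is centralized in EnsureSpaceForLazyDeopt. A minimal sketch of the hook pattern, with the signatures as they appear in this diff (the driver function and call size are made up):

    class CallWrapper {
     public:
      virtual ~CallWrapper() { }
      // Called just before the call sequence is emitted.
      virtual void BeforeCall(int call_size) const { }
      // Called right after; a SafepointGenerator records the safepoint here.
      virtual void AfterCall() const { }
    };

    // Hypothetical stand-in for MacroAssembler::InvokeFunction's call site.
    void EmitCallThroughWrapper(const CallWrapper& wrapper) {
      const int kCallSize = 13;      // made-up size of the call sequence
      wrapper.BeforeCall(kCallSize);
      // ... emit the actual call instruction(s) ...
      wrapper.AfterCall();
    }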
77 64
78 #define __ masm()-> 65 #define __ masm()->
79 66
80 bool LCodeGen::GenerateCode() { 67 bool LCodeGen::GenerateCode() {
81 HPhase phase("Code generation", chunk()); 68 HPhase phase("Code generation", chunk());
82 ASSERT(is_unused()); 69 ASSERT(is_unused());
83 status_ = GENERATING; 70 status_ = GENERATING;
84 71
85 // Open a frame scope to indicate that there is a frame on the stack. The 72 // Open a frame scope to indicate that there is a frame on the stack. The
86 // MANUAL indicates that the scope shouldn't actually generate code to set up 73 // MANUAL indicates that the scope shouldn't actually generate code to set up
87 // the frame (that is done in GeneratePrologue). 74 // the frame (that is done in GeneratePrologue).
88 FrameScope frame_scope(masm_, StackFrame::MANUAL); 75 FrameScope frame_scope(masm_, StackFrame::MANUAL);
89 76
90 return GeneratePrologue() && 77 return GeneratePrologue() &&
91 GenerateBody() && 78 GenerateBody() &&
92 GenerateDeferredCode() && 79 GenerateDeferredCode() &&
93 GenerateJumpTable() && 80 GenerateJumpTable() &&
94 GenerateSafepointTable(); 81 GenerateSafepointTable();
95 } 82 }
96 83
97 84
98 void LCodeGen::FinishCode(Handle<Code> code) { 85 void LCodeGen::FinishCode(Handle<Code> code) {
99 ASSERT(is_done()); 86 ASSERT(is_done());
100 code->set_stack_slots(GetStackSlotCount()); 87 code->set_stack_slots(GetStackSlotCount());
101 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
102 PopulateDeoptimizationData(code); 89 PopulateDeoptimizationData(code);
103 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
104 } 90 }
105 91
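The dropped EnsureRelocSpaceForLazyDeoptimization call reserved extra relocation-info space for later lazy-deopt patching; presumably it is no longer needed because the deoptimization data now records the pc offset of every lazy-deopt point (see data->SetPc below), so patching no longer depends on relocation entries.
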
106 92
107 void LCodeGen::Abort(const char* format, ...) { 93 void LCodeGen::Abort(const char* format, ...) {
108 if (FLAG_trace_bailout) { 94 if (FLAG_trace_bailout) {
109 SmartArrayPointer<char> name( 95 SmartArrayPointer<char> name(
110 info()->shared_info()->DebugName()->ToCString()); 96 info()->shared_info()->DebugName()->ToCString());
111 PrintF("Aborting LCodeGen in @\"%s\": ", *name); 97 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
112 va_list arguments; 98 va_list arguments;
113 va_start(arguments, format); 99 va_start(arguments, format);
(...skipping 85 matching lines...)
199 if (heap_slots > 0) { 185 if (heap_slots > 0) {
200 Comment(";;; Allocate local context"); 186 Comment(";;; Allocate local context");
201 // Argument to NewContext is the function, which is still in rdi. 187 // Argument to NewContext is the function, which is still in rdi.
202 __ push(rdi); 188 __ push(rdi);
203 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 189 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
204 FastNewContextStub stub(heap_slots); 190 FastNewContextStub stub(heap_slots);
205 __ CallStub(&stub); 191 __ CallStub(&stub);
206 } else { 192 } else {
207 __ CallRuntime(Runtime::kNewFunctionContext, 1); 193 __ CallRuntime(Runtime::kNewFunctionContext, 1);
208 } 194 }
209 RecordSafepoint(Safepoint::kNoDeoptimizationIndex); 195 RecordSafepoint(Safepoint::kNoLazyDeopt);
210 // Context is returned in both rax and rsi. It replaces the context 196 // Context is returned in both rax and rsi. It replaces the context
211 // passed to us. It's saved in the stack and kept live in rsi. 197 // passed to us. It's saved in the stack and kept live in rsi.
212 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 198 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
213 199
214 // Copy any necessary parameters into the context. 200 // Copy any necessary parameters into the context.
215 int num_parameters = scope()->num_parameters(); 201 int num_parameters = scope()->num_parameters();
216 for (int i = 0; i < num_parameters; i++) { 202 for (int i = 0; i < num_parameters; i++) {
217 Variable* var = scope()->parameter(i); 203 Variable* var = scope()->parameter(i);
218 if (var->IsContextSlot()) { 204 if (var->IsContextSlot()) {
219 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(...skipping 28 matching lines...)
248 if (instr->IsLabel()) { 234 if (instr->IsLabel()) {
249 LLabel* label = LLabel::cast(instr); 235 LLabel* label = LLabel::cast(instr);
250 emit_instructions = !label->HasReplacement(); 236 emit_instructions = !label->HasReplacement();
251 } 237 }
252 238
253 if (emit_instructions) { 239 if (emit_instructions) {
254 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); 240 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
255 instr->CompileToNative(this); 241 instr->CompileToNative(this);
256 } 242 }
257 } 243 }
244 EnsureSpaceForLazyDeopt();
258 return !is_aborted(); 245 return !is_aborted();
259 } 246 }
260 247
261 248
262 LInstruction* LCodeGen::GetNextInstruction() {
263 if (current_instruction_ < instructions_->length() - 1) {
264 return instructions_->at(current_instruction_ + 1);
265 } else {
266 return NULL;
267 }
268 }
269
270
271 bool LCodeGen::GenerateJumpTable() { 249 bool LCodeGen::GenerateJumpTable() {
272 for (int i = 0; i < jump_table_.length(); i++) { 250 for (int i = 0; i < jump_table_.length(); i++) {
273 __ bind(&jump_table_[i].label); 251 __ bind(&jump_table_[i].label);
274 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); 252 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
275 } 253 }
276 return !is_aborted(); 254 return !is_aborted();
277 } 255 }
278 256
279 257
280 bool LCodeGen::GenerateDeferredCode() { 258 bool LCodeGen::GenerateDeferredCode() {
281 ASSERT(is_generating()); 259 ASSERT(is_generating());
282 if (deferred_.length() > 0) { 260 if (deferred_.length() > 0) {
283 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 261 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
284 LDeferredCode* code = deferred_[i]; 262 LDeferredCode* code = deferred_[i];
285 __ bind(code->entry()); 263 __ bind(code->entry());
286 Comment(";;; Deferred code @%d: %s.", 264 Comment(";;; Deferred code @%d: %s.",
287 code->instruction_index(), 265 code->instruction_index(),
288 code->instr()->Mnemonic()); 266 code->instr()->Mnemonic());
289 code->Generate(); 267 code->Generate();
290 __ jmp(code->exit()); 268 __ jmp(code->exit());
291 } 269 }
292
293 // Pad code to ensure that the last piece of deferred code has
294 // room for lazy bailout.
295 while ((masm()->pc_offset() - LastSafepointEnd())
296 < Deoptimizer::patch_size()) {
297 int padding = masm()->pc_offset() - LastSafepointEnd();
298 if (padding > 9) {
299 __ nop(9);
300 } else {
301 __ nop(padding);
302 }
303 }
304 } 270 }
305 271
306 // Deferred code is the last part of the instruction sequence. Mark 272 // Deferred code is the last part of the instruction sequence. Mark
307 // the generated code as done unless we bailed out. 273 // the generated code as done unless we bailed out.
308 if (!is_aborted()) status_ = DONE; 274 if (!is_aborted()) status_ = DONE;
309 return !is_aborted(); 275 return !is_aborted();
310 } 276 }
311 277
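The removed loop padded only after the last piece of deferred code, measured from LastSafepointEnd(). In the new scheme the same guarantee comes from EnsureSpaceForLazyDeopt, called at the end of GenerateBody above and at each lazy-deopt site (LLazyBailout, the stack checks), so no table-relative padding is needed here.
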
312 278
313 bool LCodeGen::GenerateSafepointTable() { 279 bool LCodeGen::GenerateSafepointTable() {
314 ASSERT(is_done()); 280 ASSERT(is_done());
315 // Ensure that there is space at the end of the code to write a number
316 // of jump instructions, as well as room to write a call near the end
317 // of the code.
318 // The jumps are used when there isn't room in the code stream to write
319 // a long call instruction. Instead, a shorter call to a jump instruction
320 // in the same code object is written.
321 // The calls are used when lazily deoptimizing a function, to call the
322 // deoptimization function.
323 int short_deopts = safepoints_.CountShortDeoptimizationIntervals(
324 static_cast<unsigned>(MacroAssembler::kJumpInstructionLength));
325 int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength;
326 while (byte_count-- > 0) {
327 __ int3();
328 }
329 safepoints_.Emit(masm(), GetStackSlotCount()); 281 safepoints_.Emit(masm(), GetStackSlotCount());
330 return !is_aborted(); 282 return !is_aborted();
331 } 283 }
332 284
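The removed int3 filler reserved room near the safepoint table for the jump/call trampolines described in the old comment above; with every lazy-deopt pc now guaranteed patch_size() bytes of nops in place, that end-of-code reservation appears to be unnecessary.
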
333 285
334 Register LCodeGen::ToRegister(int index) const { 286 Register LCodeGen::ToRegister(int index) const {
335 return Register::FromAllocationIndex(index); 287 return Register::FromAllocationIndex(index);
336 } 288 }
337 289
338 290
(...skipping 145 matching lines...)
484 436
485 void LCodeGen::CallCodeGeneric(Handle<Code> code, 437 void LCodeGen::CallCodeGeneric(Handle<Code> code,
486 RelocInfo::Mode mode, 438 RelocInfo::Mode mode,
487 LInstruction* instr, 439 LInstruction* instr,
488 SafepointMode safepoint_mode, 440 SafepointMode safepoint_mode,
489 int argc) { 441 int argc) {
490 ASSERT(instr != NULL); 442 ASSERT(instr != NULL);
491 LPointerMap* pointers = instr->pointer_map(); 443 LPointerMap* pointers = instr->pointer_map();
492 RecordPosition(pointers->position()); 444 RecordPosition(pointers->position());
493 __ call(code, mode); 445 __ call(code, mode);
494 RegisterLazyDeoptimization(instr, safepoint_mode, argc); 446 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
495 447
496 // Signal that we don't inline smi code before these stubs in the 448 // Signal that we don't inline smi code before these stubs in the
497 // optimizing code generator. 449 // optimizing code generator.
498 if (code->kind() == Code::BINARY_OP_IC || 450 if (code->kind() == Code::BINARY_OP_IC ||
499 code->kind() == Code::COMPARE_IC) { 451 code->kind() == Code::COMPARE_IC) {
500 __ nop(); 452 __ nop();
501 } 453 }
502 } 454 }
503 455
504 456
505 void LCodeGen::CallCode(Handle<Code> code, 457 void LCodeGen::CallCode(Handle<Code> code,
506 RelocInfo::Mode mode, 458 RelocInfo::Mode mode,
507 LInstruction* instr) { 459 LInstruction* instr) {
508 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0); 460 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
509 } 461 }
510 462
511 463
512 void LCodeGen::CallRuntime(const Runtime::Function* function, 464 void LCodeGen::CallRuntime(const Runtime::Function* function,
513 int num_arguments, 465 int num_arguments,
514 LInstruction* instr) { 466 LInstruction* instr) {
515 ASSERT(instr != NULL); 467 ASSERT(instr != NULL);
516 ASSERT(instr->HasPointerMap()); 468 ASSERT(instr->HasPointerMap());
517 LPointerMap* pointers = instr->pointer_map(); 469 LPointerMap* pointers = instr->pointer_map();
518 RecordPosition(pointers->position()); 470 RecordPosition(pointers->position());
519 471
520 __ CallRuntime(function, num_arguments); 472 __ CallRuntime(function, num_arguments);
521 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); 473 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
522 } 474 }
523 475
524 476
525 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 477 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
526 int argc, 478 int argc,
527 LInstruction* instr) { 479 LInstruction* instr) {
528 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 480 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
529 __ CallRuntimeSaveDoubles(id); 481 __ CallRuntimeSaveDoubles(id);
530 RecordSafepointWithRegisters( 482 RecordSafepointWithRegisters(
531 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 483 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
532 } 484 }
533 485
534 486
535 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 487 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
536 SafepointMode safepoint_mode, 488 Safepoint::DeoptMode mode) {
537 int argc) {
538 // Create the environment to bail out to. If the call has side effects,
539 // execution has to continue after the call; otherwise execution can continue
540 // from a previous bailout point, repeating the call.
541 LEnvironment* deoptimization_environment;
542 if (instr->HasDeoptimizationEnvironment()) {
543 deoptimization_environment = instr->deoptimization_environment();
544 } else {
545 deoptimization_environment = instr->environment();
546 }
547
548 RegisterEnvironmentForDeoptimization(deoptimization_environment);
549 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
550 ASSERT(argc == 0);
551 RecordSafepoint(instr->pointer_map(),
552 deoptimization_environment->deoptimization_index());
553 } else {
554 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
555 RecordSafepointWithRegisters(
556 instr->pointer_map(),
557 argc,
558 deoptimization_environment->deoptimization_index());
559 }
560 }
561
562
563 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
564 if (!environment->HasBeenRegistered()) { 489 if (!environment->HasBeenRegistered()) {
565 // Physical stack frame layout: 490 // Physical stack frame layout:
566 // -x ............. -4 0 ..................................... y 491 // -x ............. -4 0 ..................................... y
567 // [incoming arguments] [spill slots] [pushed outgoing arguments] 492 // [incoming arguments] [spill slots] [pushed outgoing arguments]
568 493
569 // Layout of the environment: 494 // Layout of the environment:
570 // 0 ..................................................... size-1 495 // 0 ..................................................... size-1
571 // [parameters] [locals] [expression stack including arguments] 496 // [parameters] [locals] [expression stack including arguments]
572 497
573 // Layout of the translation: 498 // Layout of the translation:
574 // 0 ........................................................ size - 1 + 4 499 // 0 ........................................................ size - 1 + 4
575 // [expression stack including arguments] [locals] [4 words] [parameters] 500 // [expression stack including arguments] [locals] [4 words] [parameters]
576 // |>------------ translation_size ------------<| 501 // |>------------ translation_size ------------<|
577 502
578 int frame_count = 0; 503 int frame_count = 0;
579 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { 504 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
580 ++frame_count; 505 ++frame_count;
581 } 506 }
582 Translation translation(&translations_, frame_count); 507 Translation translation(&translations_, frame_count);
583 WriteTranslation(environment, &translation); 508 WriteTranslation(environment, &translation);
584 int deoptimization_index = deoptimizations_.length(); 509 int deoptimization_index = deoptimizations_.length();
585 environment->Register(deoptimization_index, translation.index()); 510 int pc_offset = masm()->pc_offset();
511 environment->Register(deoptimization_index,
512 translation.index(),
513 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
586 deoptimizations_.Add(environment); 514 deoptimizations_.Add(environment);
587 } 515 }
588 } 516 }
589 517
590 518
591 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 519 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
592 RegisterEnvironmentForDeoptimization(environment); 520 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
593 ASSERT(environment->HasBeenRegistered()); 521 ASSERT(environment->HasBeenRegistered());
594 int id = environment->deoptimization_index(); 522 int id = environment->deoptimization_index();
595 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 523 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
596 ASSERT(entry != NULL); 524 ASSERT(entry != NULL);
597 if (entry == NULL) { 525 if (entry == NULL) {
598 Abort("bailout was not prepared"); 526 Abort("bailout was not prepared");
599 return; 527 return;
600 } 528 }
601 529
602 if (cc == no_condition) { 530 if (cc == no_condition) {
(...skipping 31 matching lines...)
634 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); 562 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
635 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); 563 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
636 564
637 // Populate the deoptimization entries. 565 // Populate the deoptimization entries.
638 for (int i = 0; i < length; i++) { 566 for (int i = 0; i < length; i++) {
639 LEnvironment* env = deoptimizations_[i]; 567 LEnvironment* env = deoptimizations_[i];
640 data->SetAstId(i, Smi::FromInt(env->ast_id())); 568 data->SetAstId(i, Smi::FromInt(env->ast_id()));
641 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index())); 569 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
642 data->SetArgumentsStackHeight(i, 570 data->SetArgumentsStackHeight(i,
643 Smi::FromInt(env->arguments_stack_height())); 571 Smi::FromInt(env->arguments_stack_height()));
572 data->SetPc(i, Smi::FromInt(env->pc_offset()));
644 } 573 }
645 code->set_deoptimization_data(*data); 574 code->set_deoptimization_data(*data);
646 } 575 }
647 576
648 577
649 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) { 578 int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
650 int result = deoptimization_literals_.length(); 579 int result = deoptimization_literals_.length();
651 for (int i = 0; i < deoptimization_literals_.length(); ++i) { 580 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
652 if (deoptimization_literals_[i].is_identical_to(literal)) return i; 581 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
653 } 582 }
(...skipping 11 matching lines...)
665 for (int i = 0, length = inlined_closures->length(); 594 for (int i = 0, length = inlined_closures->length();
666 i < length; 595 i < length;
667 i++) { 596 i++) {
668 DefineDeoptimizationLiteral(inlined_closures->at(i)); 597 DefineDeoptimizationLiteral(inlined_closures->at(i));
669 } 598 }
670 599
671 inlined_function_count_ = deoptimization_literals_.length(); 600 inlined_function_count_ = deoptimization_literals_.length();
672 } 601 }
673 602
674 603
604 void LCodeGen::RecordSafepointWithLazyDeopt(
605 LInstruction* instr, SafepointMode safepoint_mode, int argc) {
606 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
607 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
608 } else {
609 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
610 RecordSafepointWithRegisters(
611 instr->pointer_map(), argc, Safepoint::kLazyDeopt);
612 }
613 }
614
615
675 void LCodeGen::RecordSafepoint( 616 void LCodeGen::RecordSafepoint(
676 LPointerMap* pointers, 617 LPointerMap* pointers,
677 Safepoint::Kind kind, 618 Safepoint::Kind kind,
678 int arguments, 619 int arguments,
679 int deoptimization_index) { 620 Safepoint::DeoptMode deopt_mode) {
680 ASSERT(kind == expected_safepoint_kind_); 621 ASSERT(kind == expected_safepoint_kind_);
681 622
682 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands(); 623 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
683 624
684 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 625 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
685 kind, arguments, deoptimization_index); 626 kind, arguments, deopt_mode);
686 for (int i = 0; i < operands->length(); i++) { 627 for (int i = 0; i < operands->length(); i++) {
687 LOperand* pointer = operands->at(i); 628 LOperand* pointer = operands->at(i);
688 if (pointer->IsStackSlot()) { 629 if (pointer->IsStackSlot()) {
689 safepoint.DefinePointerSlot(pointer->index()); 630 safepoint.DefinePointerSlot(pointer->index());
690 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 631 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
691 safepoint.DefinePointerRegister(ToRegister(pointer)); 632 safepoint.DefinePointerRegister(ToRegister(pointer));
692 } 633 }
693 } 634 }
694 if (kind & Safepoint::kWithRegisters) { 635 if (kind & Safepoint::kWithRegisters) {
695 // Register rsi always contains a pointer to the context. 636 // Register rsi always contains a pointer to the context.
696 safepoint.DefinePointerRegister(rsi); 637 safepoint.DefinePointerRegister(rsi);
697 } 638 }
698 } 639 }
699 640
700 641
701 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 642 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
702 int deoptimization_index) { 643 Safepoint::DeoptMode deopt_mode) {
703 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 644 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
704 } 645 }
705 646
706 647
707 void LCodeGen::RecordSafepoint(int deoptimization_index) { 648 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
708 LPointerMap empty_pointers(RelocInfo::kNoPosition); 649 LPointerMap empty_pointers(RelocInfo::kNoPosition);
709 RecordSafepoint(&empty_pointers, deoptimization_index); 650 RecordSafepoint(&empty_pointers, deopt_mode);
710 } 651 }
711 652
712 653
713 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 654 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
714 int arguments, 655 int arguments,
715 int deoptimization_index) { 656 Safepoint::DeoptMode deopt_mode) {
716 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, 657 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
717 deoptimization_index);
718 } 658 }
719 659
720 660
721 void LCodeGen::RecordPosition(int position) { 661 void LCodeGen::RecordPosition(int position) {
722 if (position == RelocInfo::kNoPosition) return; 662 if (position == RelocInfo::kNoPosition) return;
723 masm()->positions_recorder()->RecordPosition(position); 663 masm()->positions_recorder()->RecordPosition(position);
724 } 664 }
725 665
726 666
727 void LCodeGen::DoLabel(LLabel* label) { 667 void LCodeGen::DoLabel(LLabel* label) {
(...skipping 14 matching lines...)
742 682
743 683
744 void LCodeGen::DoGap(LGap* gap) { 684 void LCodeGen::DoGap(LGap* gap) {
745 for (int i = LGap::FIRST_INNER_POSITION; 685 for (int i = LGap::FIRST_INNER_POSITION;
746 i <= LGap::LAST_INNER_POSITION; 686 i <= LGap::LAST_INNER_POSITION;
747 i++) { 687 i++) {
748 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); 688 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
749 LParallelMove* move = gap->GetParallelMove(inner_pos); 689 LParallelMove* move = gap->GetParallelMove(inner_pos);
750 if (move != NULL) DoParallelMove(move); 690 if (move != NULL) DoParallelMove(move);
751 } 691 }
752
753 LInstruction* next = GetNextInstruction();
754 if (next != NULL && next->IsLazyBailout()) {
755 int pc = masm()->pc_offset();
756 safepoints_.SetPcAfterGap(pc);
757 }
758 } 692 }
759 693
760 694
761 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { 695 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
762 DoGap(instr); 696 DoGap(instr);
763 } 697 }
764 698
765 699
766 void LCodeGen::DoParameter(LParameter* instr) { 700 void LCodeGen::DoParameter(LParameter* instr) {
767 // Nothing to do. 701 // Nothing to do.
(...skipping 1114 matching lines...)
1882 } 1816 }
1883 1817
1884 1818
1885 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1819 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1886 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1820 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1887 public: 1821 public:
1888 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1822 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1889 LInstanceOfKnownGlobal* instr) 1823 LInstanceOfKnownGlobal* instr)
1890 : LDeferredCode(codegen), instr_(instr) { } 1824 : LDeferredCode(codegen), instr_(instr) { }
1891 virtual void Generate() { 1825 virtual void Generate() {
1892 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 1826 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
1893 } 1827 }
1894 virtual LInstruction* instr() { return instr_; } 1828 virtual LInstruction* instr() { return instr_; }
1895 Label* map_check() { return &map_check_; } 1829 Label* map_check() { return &map_check_; }
1896 private: 1830 private:
1897 LInstanceOfKnownGlobal* instr_; 1831 LInstanceOfKnownGlobal* instr_;
1898 Label map_check_; 1832 Label map_check_;
1899 }; 1833 };
1900 1834
1901 1835
1902 DeferredInstanceOfKnownGlobal* deferred; 1836 DeferredInstanceOfKnownGlobal* deferred;
(...skipping 37 matching lines...)
1940 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); 1874 __ JumpIfNotString(object, kScratchRegister, deferred->entry());
1941 1875
1942 __ bind(&false_result); 1876 __ bind(&false_result);
1943 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 1877 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
1944 1878
1945 __ bind(deferred->exit()); 1879 __ bind(deferred->exit());
1946 __ bind(&done); 1880 __ bind(&done);
1947 } 1881 }
1948 1882
1949 1883
1950 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 1884 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1951 Label* map_check) { 1885 Label* map_check) {
1952 { 1886 {
1953 PushSafepointRegistersScope scope(this); 1887 PushSafepointRegistersScope scope(this);
1954 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( 1888 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
1955 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); 1889 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
1956 InstanceofStub stub(flags); 1890 InstanceofStub stub(flags);
1957 1891
1958 __ push(ToRegister(instr->InputAt(0))); 1892 __ push(ToRegister(instr->InputAt(0)));
1959 __ Push(instr->function()); 1893 __ Push(instr->function());
1960 1894
1961 static const int kAdditionalDelta = 10; 1895 static const int kAdditionalDelta = 10;
1962 int delta = 1896 int delta =
1963 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; 1897 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1964 ASSERT(delta >= 0); 1898 ASSERT(delta >= 0);
1965 __ push_imm32(delta); 1899 __ push_imm32(delta);
1966 1900
1967 // We are pushing three values on the stack but recording a 1901 // We are pushing three values on the stack but recording a
1968 // safepoint with two arguments because stub is going to 1902 // safepoint with two arguments because stub is going to
1969 // remove the third argument from the stack before jumping 1903 // remove the third argument from the stack before jumping
1970 // to instanceof builtin on the slow path. 1904 // to instanceof builtin on the slow path.
1971 CallCodeGeneric(stub.GetCode(), 1905 CallCodeGeneric(stub.GetCode(),
1972 RelocInfo::CODE_TARGET, 1906 RelocInfo::CODE_TARGET,
1973 instr, 1907 instr,
1974 RECORD_SAFEPOINT_WITH_REGISTERS, 1908 RECORD_SAFEPOINT_WITH_REGISTERS,
1975 2); 1909 2);
1976 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); 1910 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
1911 ASSERT(instr->HasDeoptimizationEnvironment());
1912 LEnvironment* env = instr->deoptimization_environment();
1913 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
1977 // Move result to a register that survives the end of the 1914 // Move result to a register that survives the end of the
1978 // PushSafepointRegisterScope. 1915 // PushSafepointRegisterScope.
1979 __ movq(kScratchRegister, rax); 1916 __ movq(kScratchRegister, rax);
1980 } 1917 }
1981 __ testq(kScratchRegister, kScratchRegister); 1918 __ testq(kScratchRegister, kScratchRegister);
1982 Label load_false; 1919 Label load_false;
1983 Label done; 1920 Label done;
1984 __ j(not_zero, &load_false); 1921 __ j(not_zero, &load_false);
1985 __ LoadRoot(rax, Heap::kTrueValueRootIndex); 1922 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
1986 __ jmp(&done); 1923 __ jmp(&done);
(...skipping 575 matching lines...)
2562 __ j(zero, &invoke, Label::kNear); 2499 __ j(zero, &invoke, Label::kNear);
2563 __ bind(&loop); 2500 __ bind(&loop);
2564 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); 2501 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2565 __ decl(length); 2502 __ decl(length);
2566 __ j(not_zero, &loop); 2503 __ j(not_zero, &loop);
2567 2504
2568 // Invoke the function. 2505 // Invoke the function.
2569 __ bind(&invoke); 2506 __ bind(&invoke);
2570 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 2507 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2571 LPointerMap* pointers = instr->pointer_map(); 2508 LPointerMap* pointers = instr->pointer_map();
2572 LEnvironment* env = instr->deoptimization_environment();
2573 RecordPosition(pointers->position()); 2509 RecordPosition(pointers->position());
2574 RegisterEnvironmentForDeoptimization(env); 2510 SafepointGenerator safepoint_generator(
2575 SafepointGenerator safepoint_generator(this, 2511 this, pointers, Safepoint::kLazyDeopt);
2576 pointers,
2577 env->deoptimization_index());
2578 v8::internal::ParameterCount actual(rax); 2512 v8::internal::ParameterCount actual(rax);
2579 __ InvokeFunction(function, actual, CALL_FUNCTION, 2513 __ InvokeFunction(function, actual, CALL_FUNCTION,
2580 safepoint_generator, CALL_AS_METHOD); 2514 safepoint_generator, CALL_AS_METHOD);
2581 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2515 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2582 } 2516 }
2583 2517
2584 2518
2585 void LCodeGen::DoPushArgument(LPushArgument* instr) { 2519 void LCodeGen::DoPushArgument(LPushArgument* instr) {
2586 LOperand* argument = instr->InputAt(0); 2520 LOperand* argument = instr->InputAt(0);
2587 EmitPushTaggedOperand(argument); 2521 EmitPushTaggedOperand(argument);
(...skipping 57 matching lines...)
2645 2579
2646 // Invoke function. 2580 // Invoke function.
2647 __ SetCallKind(rcx, call_kind); 2581 __ SetCallKind(rcx, call_kind);
2648 if (*function == *info()->closure()) { 2582 if (*function == *info()->closure()) {
2649 __ CallSelf(); 2583 __ CallSelf();
2650 } else { 2584 } else {
2651 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2585 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2652 } 2586 }
2653 2587
2654 // Setup deoptimization. 2588 // Setup deoptimization.
2655 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); 2589 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
2656 2590
2657 // Restore context. 2591 // Restore context.
2658 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2592 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2659 } 2593 }
2660 2594
2661 2595
2662 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2596 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2663 ASSERT(ToRegister(instr->result()).is(rax)); 2597 ASSERT(ToRegister(instr->result()).is(rax));
2664 __ Move(rdi, instr->function()); 2598 __ Move(rdi, instr->function());
2665 CallKnownFunction(instr->function(), 2599 CallKnownFunction(instr->function(),
(...skipping 327 matching lines...)
2993 UNREACHABLE(); 2927 UNREACHABLE();
2994 } 2928 }
2995 } 2929 }
2996 2930
2997 2931
2998 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 2932 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
2999 ASSERT(ToRegister(instr->function()).is(rdi)); 2933 ASSERT(ToRegister(instr->function()).is(rdi));
3000 ASSERT(instr->HasPointerMap()); 2934 ASSERT(instr->HasPointerMap());
3001 ASSERT(instr->HasDeoptimizationEnvironment()); 2935 ASSERT(instr->HasDeoptimizationEnvironment());
3002 LPointerMap* pointers = instr->pointer_map(); 2936 LPointerMap* pointers = instr->pointer_map();
3003 LEnvironment* env = instr->deoptimization_environment();
3004 RecordPosition(pointers->position()); 2937 RecordPosition(pointers->position());
3005 RegisterEnvironmentForDeoptimization(env); 2938 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3006 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3007 ParameterCount count(instr->arity()); 2939 ParameterCount count(instr->arity());
3008 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 2940 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3009 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2941 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3010 } 2942 }
3011 2943
3012 2944
3013 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2945 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
3014 ASSERT(ToRegister(instr->key()).is(rcx)); 2946 ASSERT(ToRegister(instr->key()).is(rcx));
3015 ASSERT(ToRegister(instr->result()).is(rax)); 2947 ASSERT(ToRegister(instr->result()).is(rax));
3016 2948
(...skipping 1160 matching lines...)
4177 __ j(not_equal, &check_frame_marker, Label::kNear); 4109 __ j(not_equal, &check_frame_marker, Label::kNear);
4178 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); 4110 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
4179 4111
4180 // Check the marker in the calling frame. 4112 // Check the marker in the calling frame.
4181 __ bind(&check_frame_marker); 4113 __ bind(&check_frame_marker);
4182 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 4114 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
4183 Smi::FromInt(StackFrame::CONSTRUCT)); 4115 Smi::FromInt(StackFrame::CONSTRUCT));
4184 } 4116 }
4185 4117
4186 4118
4119 void LCodeGen::EnsureSpaceForLazyDeopt() {
4120 // Ensure that we have enough space after the previous lazy-bailout
4121 // instruction for patching the code here.
4122 int current_pc = masm()->pc_offset();
4123 int patch_size = Deoptimizer::patch_size();
4124 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4125 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4126 while (padding_size > 0) {
4127 int nop_size = padding_size > 9 ? 9 : padding_size;
4128 __ nop(nop_size);
4129 padding_size -= nop_size;
4130 }
4131 }
4132 last_lazy_deopt_pc_ = current_pc;
4133 }
4134
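A stand-alone sketch of the padding rule above, for readers following along; the Assembler struct, the patch size, and the numbers are made up, and only the 9-byte cap mirrors the real x64 constraint (the longest single multi-byte nop):

    #include <algorithm>
    #include <cstdio>

    struct Assembler {
      int pc_offset = 0;
      void nop(int size) { pc_offset += size; }  // pretend to emit a size-byte nop
    };

    // Mirrors EnsureSpaceForLazyDeopt: pad until the previous lazy-deopt pc
    // has patch_size bytes of code after it; return the new last-deopt pc.
    int EnsureSpaceForLazyDeopt(Assembler* masm, int last_lazy_deopt_pc,
                                int patch_size) {
      int current_pc = masm->pc_offset;
      if (current_pc < last_lazy_deopt_pc + patch_size) {
        int padding_size = last_lazy_deopt_pc + patch_size - current_pc;
        while (padding_size > 0) {
          int nop_size = std::min(padding_size, 9);  // 9-byte nop at most
          masm->nop(nop_size);
          padding_size -= nop_size;
        }
      }
      return current_pc;
    }

    int main() {
      Assembler masm;
      masm.pc_offset = 105;  // pc right after some call
      int last = EnsureSpaceForLazyDeopt(&masm, 100, 13);  // 13 is hypothetical
      // Gap was 105 - 100 = 5 < 13, so 8 bytes of nops were emitted: pc is 113.
      std::printf("pc=%d last=%d\n", masm.pc_offset, last);
      return 0;
    }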
4135
4187 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 4136 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4188 // No code for lazy bailout instruction. Used to capture environment after a 4137 EnsureSpaceForLazyDeopt();
4189 // call for populating the safepoint data with deoptimization data. 4138 ASSERT(instr->HasEnvironment());
4139 LEnvironment* env = instr->environment();
4140 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4141 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4190 } 4142 }
4191 4143
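DoLazyBailout is where the environment captured after a call gets wired up: EnsureSpaceForLazyDeopt guarantees room to patch at this pc, RegisterEnvironmentForDeoptimization(..., Safepoint::kLazyDeopt) records the environment together with the pc offset, and RecordLazyDeoptimizationIndex ties the resulting deoptimization index to the corresponding safepoint.
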
4192 4144
4193 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 4145 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4194 DeoptimizeIf(no_condition, instr->environment()); 4146 DeoptimizeIf(no_condition, instr->environment());
4195 } 4147 }
4196 4148
4197 4149
4198 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { 4150 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4199 LOperand* obj = instr->object(); 4151 LOperand* obj = instr->object();
4200 LOperand* key = instr->key(); 4152 LOperand* key = instr->key();
4201 EmitPushTaggedOperand(obj); 4153 EmitPushTaggedOperand(obj);
4202 EmitPushTaggedOperand(key); 4154 EmitPushTaggedOperand(key);
4203 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4155 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4204 LPointerMap* pointers = instr->pointer_map(); 4156 LPointerMap* pointers = instr->pointer_map();
4205 LEnvironment* env = instr->deoptimization_environment();
4206 RecordPosition(pointers->position()); 4157 RecordPosition(pointers->position());
4207 RegisterEnvironmentForDeoptimization(env);
4208 // Create safepoint generator that will also ensure enough space in the 4158 // Create safepoint generator that will also ensure enough space in the
4209 // reloc info for patching in deoptimization (since this is invoking a 4159 // reloc info for patching in deoptimization (since this is invoking a
4210 // builtin) 4160 // builtin)
4211 SafepointGenerator safepoint_generator(this, 4161 SafepointGenerator safepoint_generator(
4212 pointers, 4162 this, pointers, Safepoint::kLazyDeopt);
4213 env->deoptimization_index());
4214 __ Push(Smi::FromInt(strict_mode_flag())); 4163 __ Push(Smi::FromInt(strict_mode_flag()));
4215 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); 4164 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4216 } 4165 }
4217 4166
4218 4167
4219 void LCodeGen::DoIn(LIn* instr) { 4168 void LCodeGen::DoIn(LIn* instr) {
4220 LOperand* obj = instr->object(); 4169 LOperand* obj = instr->object();
4221 LOperand* key = instr->key(); 4170 LOperand* key = instr->key();
4222 EmitPushTaggedOperand(key); 4171 EmitPushTaggedOperand(key);
4223 EmitPushTaggedOperand(obj); 4172 EmitPushTaggedOperand(obj);
4224 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 4173 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4225 LPointerMap* pointers = instr->pointer_map(); 4174 LPointerMap* pointers = instr->pointer_map();
4226 LEnvironment* env = instr->deoptimization_environment();
4227 RecordPosition(pointers->position()); 4175 RecordPosition(pointers->position());
4228 RegisterEnvironmentForDeoptimization(env); 4176 SafepointGenerator safepoint_generator(
4229 // Create safepoint generator that will also ensure enough space in the 4177 this, pointers, Safepoint::kLazyDeopt);
4230 // reloc info for patching in deoptimization (since this is invoking a
4231 // builtin)
4232 SafepointGenerator safepoint_generator(this,
4233 pointers,
4234 env->deoptimization_index());
4235 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4178 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4236 } 4179 }
4237 4180
4238 4181
4239 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 4182 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4240 { 4183 PushSafepointRegistersScope scope(this);
4241 PushSafepointRegistersScope scope(this); 4184 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4242 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 4185 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4243 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 4186 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
4244 RegisterLazyDeoptimization(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); 4187 ASSERT(instr->HasEnvironment());
4245 } 4188 LEnvironment* env = instr->environment();
4246 4189 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4247 // The gap code includes the restoring of the safepoint registers.
4248 int pc = masm()->pc_offset();
4249 safepoints_.SetPcAfterGap(pc);
4250 } 4190 }
4251 4191
4252 4192
4253 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4193 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4254 class DeferredStackCheck: public LDeferredCode { 4194 class DeferredStackCheck: public LDeferredCode {
4255 public: 4195 public:
4256 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4196 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4257 : LDeferredCode(codegen), instr_(instr) { } 4197 : LDeferredCode(codegen), instr_(instr) { }
4258 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4198 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4259 virtual LInstruction* instr() { return instr_; } 4199 virtual LInstruction* instr() { return instr_; }
4260 private: 4200 private:
4261 LStackCheck* instr_; 4201 LStackCheck* instr_;
4262 }; 4202 };
4263 4203
4204 ASSERT(instr->HasEnvironment());
4205 LEnvironment* env = instr->environment();
4206 // There is no LLazyBailout instruction for stack-checks. We have to
4207 // prepare for lazy deoptimization explicitly here.
4264 if (instr->hydrogen()->is_function_entry()) { 4208 if (instr->hydrogen()->is_function_entry()) {
4265 // Perform stack overflow check. 4209 // Perform stack overflow check.
4266 Label done; 4210 Label done;
4267 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 4211 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4268 __ j(above_equal, &done, Label::kNear); 4212 __ j(above_equal, &done, Label::kNear);
4269 StackCheckStub stub; 4213 StackCheckStub stub;
4270 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4214 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4215 EnsureSpaceForLazyDeopt();
4271 __ bind(&done); 4216 __ bind(&done);
4217 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4218 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
4272 } else { 4219 } else {
4273 ASSERT(instr->hydrogen()->is_backwards_branch()); 4220 ASSERT(instr->hydrogen()->is_backwards_branch());
4274 // Perform stack overflow check if this goto needs it before jumping. 4221 // Perform stack overflow check if this goto needs it before jumping.
4275 DeferredStackCheck* deferred_stack_check = 4222 DeferredStackCheck* deferred_stack_check =
4276 new DeferredStackCheck(this, instr); 4223 new DeferredStackCheck(this, instr);
4277 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 4224 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4278 __ j(below, deferred_stack_check->entry()); 4225 __ j(below, deferred_stack_check->entry());
4226 EnsureSpaceForLazyDeopt();
4279 __ bind(instr->done_label()); 4227 __ bind(instr->done_label());
4280 deferred_stack_check->SetExit(instr->done_label()); 4228 deferred_stack_check->SetExit(instr->done_label());
4229 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4230 // Don't record a deoptimization index for the safepoint here.
4230 // This will be done explicitly when emitting the call and the safepoint in
4232 // the deferred code.
4281 } 4233 }
4282 } 4234 }
4283 4235
4284 4236
4285 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4237 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4286 // This is a pseudo-instruction that ensures that the environment here is 4238 // This is a pseudo-instruction that ensures that the environment here is
4287 // properly registered for deoptimization and records the assembler's PC 4239 // properly registered for deoptimization and records the assembler's PC
4288 // offset. 4240 // offset.
4289 LEnvironment* environment = instr->environment(); 4241 LEnvironment* environment = instr->environment();
4290 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4242 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4291 instr->SpilledDoubleRegisterArray()); 4243 instr->SpilledDoubleRegisterArray());
4292 4244
4293 // If the environment were already registered, we would have no way of 4245 // If the environment were already registered, we would have no way of
4294 // backpatching it with the spill slot operands. 4246 // backpatching it with the spill slot operands.
4295 ASSERT(!environment->HasBeenRegistered()); 4247 ASSERT(!environment->HasBeenRegistered());
4296 RegisterEnvironmentForDeoptimization(environment); 4248 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
4297 ASSERT(osr_pc_offset_ == -1); 4249 ASSERT(osr_pc_offset_ == -1);
4298 osr_pc_offset_ = masm()->pc_offset(); 4250 osr_pc_offset_ = masm()->pc_offset();
4299 } 4251 }
4300 4252
4301 #undef __ 4253 #undef __
4302 4254
4303 } } // namespace v8::internal 4255 } } // namespace v8::internal
4304 4256
4305 #endif // V8_TARGET_ARCH_X64 4257 #endif // V8_TARGET_ARCH_X64