| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 32 matching lines...) |
| 43 class SafepointGenerator V8_FINAL : public CallWrapper { | 43 class SafepointGenerator V8_FINAL : public CallWrapper { |
| 44 public: | 44 public: |
| 45 SafepointGenerator(LCodeGen* codegen, | 45 SafepointGenerator(LCodeGen* codegen, |
| 46 LPointerMap* pointers, | 46 LPointerMap* pointers, |
| 47 Safepoint::DeoptMode mode) | 47 Safepoint::DeoptMode mode) |
| 48 : codegen_(codegen), | 48 : codegen_(codegen), |
| 49 pointers_(pointers), | 49 pointers_(pointers), |
| 50 deopt_mode_(mode) { } | 50 deopt_mode_(mode) { } |
| 51 virtual ~SafepointGenerator() {} | 51 virtual ~SafepointGenerator() {} |
| 52 | 52 |
| 53 virtual void BeforeCall(int call_size) const V8_OVERRIDE { | 53 virtual void BeforeCall(int call_size) const V8_OVERRIDE {} |
| 54 codegen_->EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - call_size); | |
| 55 } | |
| 56 | 54 |
| 57 virtual void AfterCall() const V8_OVERRIDE { | 55 virtual void AfterCall() const V8_OVERRIDE { |
| 58 codegen_->RecordSafepoint(pointers_, deopt_mode_); | 56 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
| 59 } | 57 } |
| 60 | 58 |
| 61 private: | 59 private: |
| 62 LCodeGen* codegen_; | 60 LCodeGen* codegen_; |
| 63 LPointerMap* pointers_; | 61 LPointerMap* pointers_; |
| 64 Safepoint::DeoptMode deopt_mode_; | 62 Safepoint::DeoptMode deopt_mode_; |
| 65 }; | 63 }; |
| (...skipping 529 matching lines...) |
| 595 UNREACHABLE(); | 593 UNREACHABLE(); |
| 596 } | 594 } |
| 597 } | 595 } |
| 598 | 596 |
| 599 | 597 |
| 600 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 598 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 601 RelocInfo::Mode mode, | 599 RelocInfo::Mode mode, |
| 602 LInstruction* instr, | 600 LInstruction* instr, |
| 603 SafepointMode safepoint_mode, | 601 SafepointMode safepoint_mode, |
| 604 int argc) { | 602 int argc) { |
| 605 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code)); | |
| 606 ASSERT(instr != NULL); | 603 ASSERT(instr != NULL); |
| 607 __ call(code, mode); | 604 __ call(code, mode); |
| 608 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); | 605 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); |
| 609 | 606 |
| 610 // Signal that we don't inline smi code before these stubs in the | 607 // Signal that we don't inline smi code before these stubs in the |
| 611 // optimizing code generator. | 608 // optimizing code generator. |
| 612 if (code->kind() == Code::BINARY_OP_IC || | 609 if (code->kind() == Code::BINARY_OP_IC || |
| 613 code->kind() == Code::COMPARE_IC) { | 610 code->kind() == Code::COMPARE_IC) { |
| 614 __ nop(); | 611 __ nop(); |
| 615 } | 612 } |
| (...skipping 4791 matching lines...) |
| 5407 __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 5404 __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); |
| 5408 | 5405 |
| 5409 // Check the marker in the calling frame. | 5406 // Check the marker in the calling frame. |
| 5410 __ bind(&check_frame_marker); | 5407 __ bind(&check_frame_marker); |
| 5411 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 5408 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 5412 Smi::FromInt(StackFrame::CONSTRUCT)); | 5409 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 5413 } | 5410 } |
| 5414 | 5411 |
| 5415 | 5412 |
| 5416 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5413 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 5417 if (info()->IsStub()) return; | 5414 if (!info()->IsStub()) { |
| 5418 // Ensure that we have enough space after the previous lazy-bailout | 5415 // Ensure that we have enough space after the previous lazy-bailout |
| 5419 // instruction for patching the code here. | 5416 // instruction for patching the code here. |
| 5420 int current_pc = masm()->pc_offset(); | 5417 int current_pc = masm()->pc_offset(); |
| 5421 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5418 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 5422 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5419 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 5423 __ Nop(padding_size); | 5420 __ Nop(padding_size); |
| 5421 } |
| 5424 } | 5422 } |
| 5423 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5425 } | 5424 } |
| 5426 | 5425 |
| 5427 | 5426 |
| 5428 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5427 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5429 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5428 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5430 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5431 ASSERT(instr->HasEnvironment()); | 5429 ASSERT(instr->HasEnvironment()); |
| 5432 LEnvironment* env = instr->environment(); | 5430 LEnvironment* env = instr->environment(); |
| 5433 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5431 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5434 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5432 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5435 } | 5433 } |
| 5436 | 5434 |
| 5437 | 5435 |
| 5438 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5436 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5439 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5437 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 5440 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5438 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| (...skipping 52 matching lines...) |
| 5493 Label done; | 5491 Label done; |
| 5494 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5492 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 5495 __ j(above_equal, &done, Label::kNear); | 5493 __ j(above_equal, &done, Label::kNear); |
| 5496 | 5494 |
| 5497 ASSERT(instr->context()->IsRegister()); | 5495 ASSERT(instr->context()->IsRegister()); |
| 5498 ASSERT(ToRegister(instr->context()).is(rsi)); | 5496 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5499 CallCode(isolate()->builtins()->StackCheck(), | 5497 CallCode(isolate()->builtins()->StackCheck(), |
| 5500 RelocInfo::CODE_TARGET, | 5498 RelocInfo::CODE_TARGET, |
| 5501 instr); | 5499 instr); |
| 5502 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5500 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5503 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5504 __ bind(&done); | 5501 __ bind(&done); |
| 5505 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5502 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5506 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5503 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5507 } else { | 5504 } else { |
| 5508 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5505 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5509 // Perform stack overflow check if this goto needs it before jumping. | 5506 // Perform stack overflow check if this goto needs it before jumping. |
| 5510 DeferredStackCheck* deferred_stack_check = | 5507 DeferredStackCheck* deferred_stack_check = |
| 5511 new(zone()) DeferredStackCheck(this, instr); | 5508 new(zone()) DeferredStackCheck(this, instr); |
| 5512 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5509 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 5513 __ j(below, deferred_stack_check->entry()); | 5510 __ j(below, deferred_stack_check->entry()); |
| 5514 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5511 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5515 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5516 __ bind(instr->done_label()); | 5512 __ bind(instr->done_label()); |
| 5517 deferred_stack_check->SetExit(instr->done_label()); | 5513 deferred_stack_check->SetExit(instr->done_label()); |
| 5518 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5514 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5519 // Don't record a deoptimization index for the safepoint here. | 5515 // Don't record a deoptimization index for the safepoint here. |
| 5520 // This will be done explicitly when emitting call and the safepoint in | 5516 // This will be done explicitly when emitting call and the safepoint in |
| 5521 // the deferred code. | 5517 // the deferred code. |
| 5522 } | 5518 } |
| 5523 } | 5519 } |
| 5524 | 5520 |
| 5525 | 5521 |
| (...skipping 100 matching lines...) |
| 5626 FixedArray::kHeaderSize - kPointerSize)); | 5622 FixedArray::kHeaderSize - kPointerSize)); |
| 5627 __ bind(&done); | 5623 __ bind(&done); |
| 5628 } | 5624 } |
| 5629 | 5625 |
| 5630 | 5626 |
| 5631 #undef __ | 5627 #undef __ |
| 5632 | 5628 |
| 5633 } } // namespace v8::internal | 5629 } } // namespace v8::internal |
| 5634 | 5630 |
| 5635 #endif // V8_TARGET_ARCH_X64 | 5631 #endif // V8_TARGET_ARCH_X64 |
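
Note on the pattern in this patch (a reviewer-style summary, not part of the diff itself): the change consolidates the bookkeeping for last_lazy_deopt_pc_ into EnsureSpaceForLazyDeopt() itself, so call sites such as DoLazyBailout and DoStackCheck no longer assign it manually, and SafepointGenerator::BeforeCall / CallCodeGeneric drop their pre-call padding. The sketch below is a minimal standalone model of that padding rule under stated assumptions, not V8 code: CodeGenModel, kPatchSize, and EmitCall are hypothetical stand-ins for LCodeGen, Deoptimizer::patch_size(), and the macro-assembler calls shown in the diff.

// Standalone model of the rule consolidated into EnsureSpaceForLazyDeopt:
// after every lazy-deopt point we remember the emitted pc, and before the
// next one we pad with nops so the patcher always has at least
// `space_needed` bytes available to overwrite.
#include <cassert>
#include <cstdio>

struct CodeGenModel {
  int pc_offset = 0;           // bytes emitted so far (stand-in for masm()->pc_offset())
  int last_lazy_deopt_pc = 0;  // pc recorded at the previous lazy-deopt point
  bool is_stub = false;        // stubs skip the padding, as in the diff

  void Nop(int n) { pc_offset += n; }           // stand-in for __ Nop(n)
  void EmitCall(int size) { pc_offset += size; }

  // Mirrors the restructured helper: pad only for non-stubs, but always
  // record the new last_lazy_deopt_pc so call sites no longer have to.
  void EnsureSpaceForLazyDeopt(int space_needed) {
    if (!is_stub) {
      int current_pc = pc_offset;
      if (current_pc < last_lazy_deopt_pc + space_needed) {
        Nop(last_lazy_deopt_pc + space_needed - current_pc);
      }
    }
    last_lazy_deopt_pc = pc_offset;
  }
};

int main() {
  const int kPatchSize = 13;  // hypothetical stand-in for Deoptimizer::patch_size()
  CodeGenModel gen;
  gen.EmitCall(5);                          // some code after the previous deopt point
  gen.EnsureSpaceForLazyDeopt(kPatchSize);
  assert(gen.pc_offset >= kPatchSize);      // enough room to patch the previous point
  std::printf("pc=%d last_lazy_deopt_pc=%d\n", gen.pc_offset, gen.last_lazy_deopt_pc);
  return 0;
}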