| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 253 matching lines...) |
| 264 if (FLAG_code_comments && instr->HasInterestingComment(this)) { | 264 if (FLAG_code_comments && instr->HasInterestingComment(this)) { |
| 265 Comment(";;; <@%d,#%d> %s", | 265 Comment(";;; <@%d,#%d> %s", |
| 266 current_instruction_, | 266 current_instruction_, |
| 267 instr->hydrogen_value()->id(), | 267 instr->hydrogen_value()->id(), |
| 268 instr->Mnemonic()); | 268 instr->Mnemonic()); |
| 269 } | 269 } |
| 270 | 270 |
| 271 instr->CompileToNative(this); | 271 instr->CompileToNative(this); |
| 272 } | 272 } |
| 273 EnsureSpaceForLazyDeopt(); | 273 EnsureSpaceForLazyDeopt(); |
| | 274 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 274 return !is_aborted(); | 275 return !is_aborted(); |
| 275 } | 276 } |
| 276 | 277 |
| 277 | 278 |
| 278 bool LCodeGen::GenerateDeferredCode() { | 279 bool LCodeGen::GenerateDeferredCode() { |
| 279 ASSERT(is_generating()); | 280 ASSERT(is_generating()); |
| 280 if (deferred_.length() > 0) { | 281 if (deferred_.length() > 0) { |
| 281 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 282 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 282 LDeferredCode* code = deferred_[i]; | 283 LDeferredCode* code = deferred_[i]; |
| 283 Comment(";;; <@%d,#%d> " | 284 Comment(";;; <@%d,#%d> " |
| (...skipping 5343 matching lines...) |
| 5627 int current_pc = masm()->pc_offset(); | 5628 int current_pc = masm()->pc_offset(); |
| 5628 int patch_size = Deoptimizer::patch_size(); | 5629 int patch_size = Deoptimizer::patch_size(); |
| 5629 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 5630 if (current_pc < last_lazy_deopt_pc_ + patch_size) { |
| 5630 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | 5631 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; |
| 5631 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 5632 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
| 5632 while (padding_size > 0) { | 5633 while (padding_size > 0) { |
| 5633 __ nop(); | 5634 __ nop(); |
| 5634 padding_size -= Assembler::kInstrSize; | 5635 padding_size -= Assembler::kInstrSize; |
| 5635 } | 5636 } |
| 5636 } | 5637 } |
| 5637 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 5638 } | 5638 } |
| 5639 | 5639 |
| 5640 | 5640 |
| 5641 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5641 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5642 EnsureSpaceForLazyDeopt(); | 5642 EnsureSpaceForLazyDeopt(); |
| | 5643 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5643 ASSERT(instr->HasEnvironment()); | 5644 ASSERT(instr->HasEnvironment()); |
| 5644 LEnvironment* env = instr->environment(); | 5645 LEnvironment* env = instr->environment(); |
| 5645 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5646 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5646 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5647 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5647 } | 5648 } |
| 5648 | 5649 |
| 5649 | 5650 |
| 5650 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5651 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5651 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 5652 if (instr->hydrogen_value()->IsSoftDeoptimize()) { |
| 5652 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); | 5653 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); |
| (...skipping 35 matching lines...) |
| 5688 // There is no LLazyBailout instruction for stack-checks. We have to | 5689 // There is no LLazyBailout instruction for stack-checks. We have to |
| 5689 // prepare for lazy deoptimization explicitly here. | 5690 // prepare for lazy deoptimization explicitly here. |
| 5690 if (instr->hydrogen()->is_function_entry()) { | 5691 if (instr->hydrogen()->is_function_entry()) { |
| 5691 // Perform stack overflow check. | 5692 // Perform stack overflow check. |
| 5692 Label done; | 5693 Label done; |
| 5693 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5694 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5694 __ Branch(&done, hs, sp, Operand(at)); | 5695 __ Branch(&done, hs, sp, Operand(at)); |
| 5695 StackCheckStub stub; | 5696 StackCheckStub stub; |
| 5696 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5697 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5697 EnsureSpaceForLazyDeopt(); | 5698 EnsureSpaceForLazyDeopt(); |
| | 5699 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5698 __ bind(&done); | 5700 __ bind(&done); |
| 5699 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5701 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5700 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5702 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5701 } else { | 5703 } else { |
| 5702 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5704 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5703 // Perform stack overflow check if this goto needs it before jumping. | 5705 // Perform stack overflow check if this goto needs it before jumping. |
| 5704 DeferredStackCheck* deferred_stack_check = | 5706 DeferredStackCheck* deferred_stack_check = |
| 5705 new(zone()) DeferredStackCheck(this, instr); | 5707 new(zone()) DeferredStackCheck(this, instr); |
| 5706 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 5708 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
| 5707 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); | 5709 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); |
| 5708 EnsureSpaceForLazyDeopt(); | 5710 EnsureSpaceForLazyDeopt(); |
| | 5711 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5709 __ bind(instr->done_label()); | 5712 __ bind(instr->done_label()); |
| 5710 deferred_stack_check->SetExit(instr->done_label()); | 5713 deferred_stack_check->SetExit(instr->done_label()); |
| 5711 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5714 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5712 // Don't record a deoptimization index for the safepoint here. | 5715 // Don't record a deoptimization index for the safepoint here. |
| 5713 // This will be done explicitly when emitting call and the safepoint in | 5716 // This will be done explicitly when emitting call and the safepoint in |
| 5714 // the deferred code. | 5717 // the deferred code. |
| 5715 } | 5718 } |
| 5716 } | 5719 } |
| 5717 | 5720 |
| 5718 | 5721 |
| (...skipping 102 matching lines...) |
| 5821 __ Subu(scratch, result, scratch); | 5824 __ Subu(scratch, result, scratch); |
| 5822 __ lw(result, FieldMemOperand(scratch, | 5825 __ lw(result, FieldMemOperand(scratch, |
| 5823 FixedArray::kHeaderSize - kPointerSize)); | 5826 FixedArray::kHeaderSize - kPointerSize)); |
| 5824 __ bind(&done); | 5827 __ bind(&done); |
| 5825 } | 5828 } |
| 5826 | 5829 |
| 5827 | 5830 |
| 5828 #undef __ | 5831 #undef __ |
| 5829 | 5832 |
| 5830 } } // namespace v8::internal | 5833 } } // namespace v8::internal |
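Note on the hunks above: the change moves the last_lazy_deopt_pc_ = masm()->pc_offset(); update out of EnsureSpaceForLazyDeopt() and into each call site (GenerateBody, DoLazyBailout, and both arms of DoStackCheck). The padding loop itself is untouched: it emits nops until at least Deoptimizer::patch_size() bytes separate the current pc from the previously recorded lazy-deopt pc, so the deoptimizer can later patch a call over that site. Below is a minimal standalone sketch of that rule, assuming a 4-byte MIPS instruction and an illustrative patch size; the names (CodeGenSketch, EmitNop, PatchSize) are stand-ins for illustration, not the real Assembler/Deoptimizer interfaces.

    // Sketch only: illustrates the nop-padding rule between lazy-deopt points.
    #include <cassert>

    constexpr int kInstrSize = 4;                          // MIPS: 4-byte instructions
    constexpr int PatchSize() { return 4 * kInstrSize; }   // assumed patch size

    struct CodeGenSketch {
      int pc_offset = 0;           // bytes emitted so far
      int last_lazy_deopt_pc = 0;  // pc recorded at the previous lazy-deopt point

      void EmitNop() { pc_offset += kInstrSize; }

      // Mirrors EnsureSpaceForLazyDeopt() after this patch: pad with nops until
      // PatchSize() bytes separate us from the previous lazy-deopt pc, and leave
      // recording the new pc to the caller, as the updated call sites now do.
      void EnsureSpaceForLazyDeopt() {
        if (pc_offset < last_lazy_deopt_pc + PatchSize()) {
          int padding = last_lazy_deopt_pc + PatchSize() - pc_offset;
          assert(padding % kInstrSize == 0);
          while (padding > 0) {
            EmitNop();
            padding -= kInstrSize;
          }
        }
      }
    };

    int main() {
      CodeGenSketch cg;
      cg.EmitNop();                          // pretend one instruction was emitted
      cg.EnsureSpaceForLazyDeopt();          // pads up to PatchSize() bytes
      cg.last_lazy_deopt_pc = cg.pc_offset;  // caller records the pc, per the diff
      assert(cg.pc_offset == PatchSize());
      return 0;
    }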