OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 252 matching lines...)
263 | 263 |
264 // Adjust the frame size, subsuming the unoptimized frame into the | 264 // Adjust the frame size, subsuming the unoptimized frame into the |
265 // optimized frame. | 265 // optimized frame. |
266 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 266 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
267 ASSERT(slots >= 0); | 267 ASSERT(slots >= 0); |
268 __ sub(sp, sp, Operand(slots * kPointerSize)); | 268 __ sub(sp, sp, Operand(slots * kPointerSize)); |
269 } | 269 } |
270 | 270 |
271 | 271 |
272 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 272 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| 273 if (instr->IsCall()) { |
| 274 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 275 } |
273 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 276 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
274 safepoints_.BumpLastLazySafepointIndex(); | 277 safepoints_.BumpLastLazySafepointIndex(); |
275 } | 278 } |
276 } | 279 } |
277 | 280 |
278 | 281 |
279 bool LCodeGen::GenerateDeferredCode() { | 282 bool LCodeGen::GenerateDeferredCode() { |
280 ASSERT(is_generating()); | 283 ASSERT(is_generating()); |
281 if (deferred_.length() > 0) { | 284 if (deferred_.length() > 0) { |
282 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 285 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
(...skipping 5258 matching lines...)
5541 __ nop(); | 5544 __ nop(); |
5542 padding_size -= Assembler::kInstrSize; | 5545 padding_size -= Assembler::kInstrSize; |
5543 } | 5546 } |
5544 } | 5547 } |
5545 } | 5548 } |
5546 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5549 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5547 } | 5550 } |
5548 | 5551 |
5549 | 5552 |
5550 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5553 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
5551 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5554 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5552 ASSERT(instr->HasEnvironment()); | 5555 ASSERT(instr->HasEnvironment()); |
5553 LEnvironment* env = instr->environment(); | 5556 LEnvironment* env = instr->environment(); |
5554 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5557 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5555 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5558 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5556 } | 5559 } |
5557 | 5560 |
5558 | 5561 |
5559 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5562 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5560 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5563 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
5561 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5564 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
(...skipping 53 matching lines...)
5615 Label done; | 5618 Label done; |
5616 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5619 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5617 __ cmp(sp, Operand(ip)); | 5620 __ cmp(sp, Operand(ip)); |
5618 __ b(hs, &done); | 5621 __ b(hs, &done); |
5619 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 5622 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); |
5620 ASSERT(instr->context()->IsRegister()); | 5623 ASSERT(instr->context()->IsRegister()); |
5621 ASSERT(ToRegister(instr->context()).is(cp)); | 5624 ASSERT(ToRegister(instr->context()).is(cp)); |
5622 CallCode(isolate()->builtins()->StackCheck(), | 5625 CallCode(isolate()->builtins()->StackCheck(), |
5623 RelocInfo::CODE_TARGET, | 5626 RelocInfo::CODE_TARGET, |
5624 instr); | 5627 instr); |
5625 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
5626 __ bind(&done); | 5628 __ bind(&done); |
5627 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
5628 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
5629 } else { | 5629 } else { |
5630 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5630 ASSERT(instr->hydrogen()->is_backwards_branch()); |
5631 // Perform stack overflow check if this goto needs it before jumping. | 5631 // Perform stack overflow check if this goto needs it before jumping. |
5632 DeferredStackCheck* deferred_stack_check = | 5632 DeferredStackCheck* deferred_stack_check = |
5633 new(zone()) DeferredStackCheck(this, instr); | 5633 new(zone()) DeferredStackCheck(this, instr); |
5634 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5634 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5635 __ cmp(sp, Operand(ip)); | 5635 __ cmp(sp, Operand(ip)); |
5636 __ b(lo, deferred_stack_check->entry()); | 5636 __ b(lo, deferred_stack_check->entry()); |
5637 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5637 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
5638 __ bind(instr->done_label()); | 5638 __ bind(instr->done_label()); |
(...skipping 111 matching lines...)
5750 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5750 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5751 __ ldr(result, FieldMemOperand(scratch, | 5751 __ ldr(result, FieldMemOperand(scratch, |
5752 FixedArray::kHeaderSize - kPointerSize)); | 5752 FixedArray::kHeaderSize - kPointerSize)); |
5753 __ bind(&done); | 5753 __ bind(&done); |
5754 } | 5754 } |
5755 | 5755 |
5756 | 5756 |
5757 #undef __ | 5757 #undef __ |
5758 | 5758 |
5759 } } // namespace v8::internal | 5759 } } // namespace v8::internal |