OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 372 matching lines...)
383 | 383 |
384 // Adjust the frame size, subsuming the unoptimized frame into the | 384 // Adjust the frame size, subsuming the unoptimized frame into the |
385 // optimized frame. | 385 // optimized frame. |
386 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 386 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
387 ASSERT(slots >= 1); | 387 ASSERT(slots >= 1); |
388 __ sub(esp, Immediate((slots - 1) * kPointerSize)); | 388 __ sub(esp, Immediate((slots - 1) * kPointerSize)); |
389 } | 389 } |
390 | 390 |
391 | 391 |
392 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 392 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| 393 if (instr->IsCall()) { |
| 394 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 395 } |
393 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 396 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
394 safepoints_.BumpLastLazySafepointIndex(); | 397 safepoints_.BumpLastLazySafepointIndex(); |
395 } | 398 } |
396 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); | 399 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); |
397 } | 400 } |
398 | 401 |
399 | 402 |
400 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { | 403 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { |
401 if (!CpuFeatures::IsSupported(SSE2)) { | 404 if (!CpuFeatures::IsSupported(SSE2)) { |
402 if (instr->IsGoto()) { | 405 if (instr->IsGoto()) { |
(...skipping 5662 matching lines...)
6065 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 6068 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
6066 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 6069 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
6067 __ Nop(padding_size); | 6070 __ Nop(padding_size); |
6068 } | 6071 } |
6069 } | 6072 } |
6070 last_lazy_deopt_pc_ = masm()->pc_offset(); | 6073 last_lazy_deopt_pc_ = masm()->pc_offset(); |
6071 } | 6074 } |
6072 | 6075 |
6073 | 6076 |
6074 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 6077 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
6075 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 6078 last_lazy_deopt_pc_ = masm()->pc_offset(); |
6076 ASSERT(instr->HasEnvironment()); | 6079 ASSERT(instr->HasEnvironment()); |
6077 LEnvironment* env = instr->environment(); | 6080 LEnvironment* env = instr->environment(); |
6078 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6081 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
6079 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6082 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
6080 } | 6083 } |
6081 | 6084 |
6082 | 6085 |
6083 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 6086 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
6084 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 6087 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
6085 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 6088 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
(...skipping 55 matching lines...)
6141 ExternalReference stack_limit = | 6144 ExternalReference stack_limit = |
6142 ExternalReference::address_of_stack_limit(isolate()); | 6145 ExternalReference::address_of_stack_limit(isolate()); |
6143 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 6146 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
6144 __ j(above_equal, &done, Label::kNear); | 6147 __ j(above_equal, &done, Label::kNear); |
6145 | 6148 |
6146 ASSERT(instr->context()->IsRegister()); | 6149 ASSERT(instr->context()->IsRegister()); |
6147 ASSERT(ToRegister(instr->context()).is(esi)); | 6150 ASSERT(ToRegister(instr->context()).is(esi)); |
6148 CallCode(isolate()->builtins()->StackCheck(), | 6151 CallCode(isolate()->builtins()->StackCheck(), |
6149 RelocInfo::CODE_TARGET, | 6152 RelocInfo::CODE_TARGET, |
6150 instr); | 6153 instr); |
6151 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
6152 __ bind(&done); | 6154 __ bind(&done); |
6153 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
6154 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
6155 } else { | 6155 } else { |
6156 ASSERT(instr->hydrogen()->is_backwards_branch()); | 6156 ASSERT(instr->hydrogen()->is_backwards_branch()); |
6157 // Perform stack overflow check if this goto needs it before jumping. | 6157 // Perform stack overflow check if this goto needs it before jumping. |
6158 DeferredStackCheck* deferred_stack_check = | 6158 DeferredStackCheck* deferred_stack_check = |
6159 new(zone()) DeferredStackCheck(this, instr, x87_stack_); | 6159 new(zone()) DeferredStackCheck(this, instr, x87_stack_); |
6160 ExternalReference stack_limit = | 6160 ExternalReference stack_limit = |
6161 ExternalReference::address_of_stack_limit(isolate()); | 6161 ExternalReference::address_of_stack_limit(isolate()); |
6162 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 6162 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
6163 __ j(below, deferred_stack_check->entry()); | 6163 __ j(below, deferred_stack_check->entry()); |
6164 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 6164 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
(...skipping 108 matching lines...)
6273 FixedArray::kHeaderSize - kPointerSize)); | 6273 FixedArray::kHeaderSize - kPointerSize)); |
6274 __ bind(&done); | 6274 __ bind(&done); |
6275 } | 6275 } |
6276 | 6276 |
6277 | 6277 |
6278 #undef __ | 6278 #undef __ |
6279 | 6279 |
6280 } } // namespace v8::internal | 6280 } } // namespace v8::internal |
6281 | 6281 |
6282 #endif // V8_TARGET_ARCH_IA32 | 6282 #endif // V8_TARGET_ARCH_IA32 |
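
For context on the mechanism this CL is rearranging: EnsureSpaceForLazyDeopt (its tail is visible around new lines 6068-6073) pads the instruction stream with nops so that the lazy-deopt call patch written at last_lazy_deopt_pc_ cannot overwrite the instruction that follows it; the CL moves that call to before every call instruction in GenerateBodyInstructionPre and leaves DoLazyBailout to only record the current pc. Below is a minimal standalone sketch of just the padding arithmetic, with a hypothetical helper name standing in for the MacroAssembler plumbing (PaddingForLazyDeopt is not a function in this CL):

  #include <cstdio>

  // Stand-in for the masm()->pc_offset() / __ Nop(padding_size) machinery:
  // given the offset of the last lazy-deopt site and the number of bytes the
  // deopt patch needs (Deoptimizer::patch_size() in the real code), compute
  // how many nop bytes must be emitted at the current offset so the patch
  // cannot clobber the following instruction.
  int PaddingForLazyDeopt(int last_lazy_deopt_pc, int space_needed,
                          int current_pc) {
    if (current_pc < last_lazy_deopt_pc + space_needed) {
      return last_lazy_deopt_pc + space_needed - current_pc;
    }
    return 0;  // Enough room already; no nops required.
  }

  int main() {
    // Previous lazy-deopt site at offset 100, patch needs 5 bytes, we are
    // currently at offset 102: pad with 3 nops to reach offset 105.
    std::printf("%d\n", PaddingForLazyDeopt(100, 5, 102));  // prints 3
    // At offset 110 the patch region is already clear: no padding.
    std::printf("%d\n", PaddingForLazyDeopt(100, 5, 110));  // prints 0
    return 0;
  }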