| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 256 matching lines...) |
| 267 | 267 |
| 268 // Adjust the frame size, subsuming the unoptimized frame into the | 268 // Adjust the frame size, subsuming the unoptimized frame into the |
| 269 // optimized frame. | 269 // optimized frame. |
| 270 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 270 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
| 271 ASSERT(slots >= 0); | 271 ASSERT(slots >= 0); |
| 272 __ subq(rsp, Immediate(slots * kPointerSize)); | 272 __ subq(rsp, Immediate(slots * kPointerSize)); |
| 273 } | 273 } |
| 274 | 274 |
| 275 | 275 |
| 276 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 276 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| | 277 if (instr->IsCall()) { |
| | 278 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| | 279 } |
| 277 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 280 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
| 278 safepoints_.BumpLastLazySafepointIndex(); | 281 safepoints_.BumpLastLazySafepointIndex(); |
| 279 } | 282 } |
| 280 } | 283 } |
| 281 | 284 |
| 282 | 285 |
| 283 bool LCodeGen::GenerateJumpTable() { | 286 bool LCodeGen::GenerateJumpTable() { |
| 284 Label needs_frame; | 287 Label needs_frame; |
| 285 if (jump_table_.length() > 0) { | 288 if (jump_table_.length() > 0) { |
| 286 Comment(";;; -------------------- Jump table --------------------"); | 289 Comment(";;; -------------------- Jump table --------------------"); |
| (...skipping 5071 matching lines...) |
| 5358 if (current_pc < last_lazy_deopt_pc_ + space_needed) { | 5361 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 5359 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 5362 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 5360 __ Nop(padding_size); | 5363 __ Nop(padding_size); |
| 5361 } | 5364 } |
| 5362 } | 5365 } |
| 5363 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5366 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5364 } | 5367 } |
| 5365 | 5368 |
| 5366 | 5369 |
| 5367 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5370 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5368 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5371 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 5369 ASSERT(instr->HasEnvironment()); | 5372 ASSERT(instr->HasEnvironment()); |
| 5370 LEnvironment* env = instr->environment(); | 5373 LEnvironment* env = instr->environment(); |
| 5371 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5374 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5372 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5375 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5373 } | 5376 } |
| 5374 | 5377 |
| 5375 | 5378 |
| 5376 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5379 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5377 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5380 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 5378 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5381 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| (...skipping 51 matching lines...) |
| 5430 // Perform stack overflow check. | 5433 // Perform stack overflow check. |
| 5431 Label done; | 5434 Label done; |
| 5432 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5435 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 5433 __ j(above_equal, &done, Label::kNear); | 5436 __ j(above_equal, &done, Label::kNear); |
| 5434 | 5437 |
| 5435 ASSERT(instr->context()->IsRegister()); | 5438 ASSERT(instr->context()->IsRegister()); |
| 5436 ASSERT(ToRegister(instr->context()).is(rsi)); | 5439 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5437 CallCode(isolate()->builtins()->StackCheck(), | 5440 CallCode(isolate()->builtins()->StackCheck(), |
| 5438 RelocInfo::CODE_TARGET, | 5441 RelocInfo::CODE_TARGET, |
| 5439 instr); | 5442 instr); |
| 5440 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
| 5441 __ bind(&done); | 5443 __ bind(&done); |
| 5442 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
| 5443 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 5444 } else { | 5444 } else { |
| 5445 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5445 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 5446 // Perform stack overflow check if this goto needs it before jumping. | 5446 // Perform stack overflow check if this goto needs it before jumping. |
| 5447 DeferredStackCheck* deferred_stack_check = | 5447 DeferredStackCheck* deferred_stack_check = |
| 5448 new(zone()) DeferredStackCheck(this, instr); | 5448 new(zone()) DeferredStackCheck(this, instr); |
| 5449 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5449 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 5450 __ j(below, deferred_stack_check->entry()); | 5450 __ j(below, deferred_stack_check->entry()); |
| 5451 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5451 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 5452 __ bind(instr->done_label()); | 5452 __ bind(instr->done_label()); |
| 5453 deferred_stack_check->SetExit(instr->done_label()); | 5453 deferred_stack_check->SetExit(instr->done_label()); |
| (...skipping 108 matching lines...) |
| 5562 FixedArray::kHeaderSize - kPointerSize)); | 5562 FixedArray::kHeaderSize - kPointerSize)); |
| 5563 __ bind(&done); | 5563 __ bind(&done); |
| 5564 } | 5564 } |
| 5565 | 5565 |
| 5566 | 5566 |
| 5567 #undef __ | 5567 #undef __ |
| 5568 | 5568 |
| 5569 } } // namespace v8::internal | 5569 } } // namespace v8::internal |
| 5570 | 5570 |
| 5571 #endif // V8_TARGET_ARCH_X64 | 5571 #endif // V8_TARGET_ARCH_X64 |