| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 31 matching lines...) |
| 42 class SafepointGenerator : public CallWrapper { | 42 class SafepointGenerator : public CallWrapper { |
| 43 public: | 43 public: |
| 44 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 45 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 46 Safepoint::DeoptMode mode) | 46 Safepoint::DeoptMode mode) |
| 47 : codegen_(codegen), | 47 : codegen_(codegen), |
| 48 pointers_(pointers), | 48 pointers_(pointers), |
| 49 deopt_mode_(mode) { } | 49 deopt_mode_(mode) { } |
| 50 virtual ~SafepointGenerator() { } | 50 virtual ~SafepointGenerator() { } |
| 51 | 51 |
| 52 virtual void BeforeCall(int call_size) const { } | 52 virtual void BeforeCall(int call_size) const { |
| 53 codegen_->EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - call_size); |
| 54 } |
| 53 | 55 |
| 54 virtual void AfterCall() const { | 56 virtual void AfterCall() const { |
| 55 codegen_->RecordSafepoint(pointers_, deopt_mode_); | 57 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
| 56 } | 58 } |
| 57 | 59 |
| 58 private: | 60 private: |
| 59 LCodeGen* codegen_; | 61 LCodeGen* codegen_; |
| 60 LPointerMap* pointers_; | 62 LPointerMap* pointers_; |
| 61 Safepoint::DeoptMode deopt_mode_; | 63 Safepoint::DeoptMode deopt_mode_; |
| 62 }; | 64 }; |
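The new BeforeCall body reuses the padding helper: per the comment in EnsureSpaceForLazyDeopt, there must be enough code after the previous lazy-bailout pc for the deoptimizer's patch. Passing patch_size() - call_size rather than the full patch_size() credits the call instruction itself toward the required gap: after the padding plus the call_size-byte call, the call's return address ends up at least patch_size() bytes past the previous bailout. CallCodeGeneric below applies the same computation before __ call(code, mode). A minimal arithmetic sketch of that invariant, with invented offsets and a hypothetical kPatchSize standing in for Deoptimizer::patch_size():

    #include <cassert>

    // Hypothetical value for the sketch; V8 asks Deoptimizer::patch_size().
    const int kPatchSize = 13;

    int main() {
      int last_lazy_deopt_pc = 200;  // pc recorded at the previous bailout
      int call_size = 5;             // size of the call about to be emitted
      int pc = 203;                  // current assembler offset (invented)

      // EnsureSpaceForLazyDeopt(kPatchSize - call_size): nops, if needed,
      // bring pc up to last_lazy_deopt_pc + (kPatchSize - call_size).
      int target = last_lazy_deopt_pc + (kPatchSize - call_size);
      if (pc < target) pc = target;

      pc += call_size;  // emit the call; pc is now its return address

      // The return address lies at least patch_size bytes past the
      // previous lazy-deopt pc, so the deopt patch cannot spill past it.
      assert(pc >= last_lazy_deopt_pc + kPatchSize);
      return 0;
    }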
| (...skipping 171 matching lines...) |
| 234 if (instr->IsLabel()) { | 236 if (instr->IsLabel()) { |
| 235 LLabel* label = LLabel::cast(instr); | 237 LLabel* label = LLabel::cast(instr); |
| 236 emit_instructions = !label->HasReplacement(); | 238 emit_instructions = !label->HasReplacement(); |
| 237 } | 239 } |
| 238 | 240 |
| 239 if (emit_instructions) { | 241 if (emit_instructions) { |
| 240 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 242 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
| 241 instr->CompileToNative(this); | 243 instr->CompileToNative(this); |
| 242 } | 244 } |
| 243 } | 245 } |
| 244 EnsureSpaceForLazyDeopt(); | 246 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 245 return !is_aborted(); | 247 return !is_aborted(); |
| 246 } | 248 } |
| 247 | 249 |
| 248 | 250 |
| 249 bool LCodeGen::GenerateJumpTable() { | 251 bool LCodeGen::GenerateJumpTable() { |
| 250 for (int i = 0; i < jump_table_.length(); i++) { | 252 for (int i = 0; i < jump_table_.length(); i++) { |
| 251 __ bind(&jump_table_[i].label); | 253 __ bind(&jump_table_[i].label); |
| 252 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); | 254 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); |
| 253 } | 255 } |
| 254 return !is_aborted(); | 256 return !is_aborted(); |
| (...skipping 177 matching lines...) |
| 432 UNREACHABLE(); | 434 UNREACHABLE(); |
| 433 } | 435 } |
| 434 } | 436 } |
| 435 | 437 |
| 436 | 438 |
| 437 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 439 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 438 RelocInfo::Mode mode, | 440 RelocInfo::Mode mode, |
| 439 LInstruction* instr, | 441 LInstruction* instr, |
| 440 SafepointMode safepoint_mode, | 442 SafepointMode safepoint_mode, |
| 441 int argc) { | 443 int argc) { |
| 444 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code)); |
| 442 ASSERT(instr != NULL); | 445 ASSERT(instr != NULL); |
| 443 LPointerMap* pointers = instr->pointer_map(); | 446 LPointerMap* pointers = instr->pointer_map(); |
| 444 RecordPosition(pointers->position()); | 447 RecordPosition(pointers->position()); |
| 445 __ call(code, mode); | 448 __ call(code, mode); |
| 446 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); | 449 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); |
| 447 | 450 |
| 448 // Signal that we don't inline smi code before these stubs in the | 451 // Signal that we don't inline smi code before these stubs in the |
| 449 // optimizing code generator. | 452 // optimizing code generator. |
| 450 if (code->kind() == Code::BINARY_OP_IC || | 453 if (code->kind() == Code::BINARY_OP_IC || |
| 451 code->kind() == Code::COMPARE_IC) { | 454 code->kind() == Code::COMPARE_IC) { |
| (...skipping 3717 matching lines...) |
| 4169 __ j(not_equal, &check_frame_marker, Label::kNear); | 4172 __ j(not_equal, &check_frame_marker, Label::kNear); |
| 4170 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 4173 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
| 4171 | 4174 |
| 4172 // Check the marker in the calling frame. | 4175 // Check the marker in the calling frame. |
| 4173 __ bind(&check_frame_marker); | 4176 __ bind(&check_frame_marker); |
| 4174 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 4177 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 4175 Smi::FromInt(StackFrame::CONSTRUCT)); | 4178 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 4176 } | 4179 } |
| 4177 | 4180 |
| 4178 | 4181 |
| 4179 void LCodeGen::EnsureSpaceForLazyDeopt() { | 4182 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 4180 // Ensure that we have enough space after the previous lazy-bailout | 4183 // Ensure that we have enough space after the previous lazy-bailout |
| 4181 // instruction for patching the code here. | 4184 // instruction for patching the code here. |
| 4182 int current_pc = masm()->pc_offset(); | 4185 int current_pc = masm()->pc_offset(); |
| 4183 int patch_size = Deoptimizer::patch_size(); | 4186 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 4184 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 4187 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 4185 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | |
| 4186 while (padding_size > 0) { | 4188 while (padding_size > 0) { |
| 4187 int nop_size = padding_size > 9 ? 9 : padding_size; | 4189 int nop_size = padding_size > 9 ? 9 : padding_size; |
| 4188 __ nop(nop_size); | 4190 __ nop(nop_size); |
| 4189 padding_size -= nop_size; | 4191 padding_size -= nop_size; |
| 4190 } | 4192 } |
| 4191 } | 4193 } |
| 4192 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 4193 } | 4194 } |
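The padding loop caps each emitted nop at nine bytes, matching the longest multi-byte NOP encoding the x64 assembler emits in a single instruction; a larger gap becomes a run of maximal nops plus one remainder. A small standalone model of the loop, with printf standing in for the assembler and invented offsets:

    #include <cstdio>

    // Standalone model of the padding loop above. printf stands in for
    // __ nop(n); the 9-byte cap mirrors the longest single multi-byte
    // NOP the x64 assembler emits.
    void EmitPadding(int last_lazy_deopt_pc, int space_needed,
                     int current_pc) {
      if (current_pc < last_lazy_deopt_pc + space_needed) {
        int padding_size = last_lazy_deopt_pc + space_needed - current_pc;
        while (padding_size > 0) {
          int nop_size = padding_size > 9 ? 9 : padding_size;
          printf("nop(%d)\n", nop_size);
          padding_size -= nop_size;
        }
      }
    }

    int main() {
      // 13-byte patch needed, current pc only 2 bytes past the last
      // bailout: prints nop(9) then nop(2).
      EmitPadding(100, 13, 102);
      return 0;
    }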
| 4194 | 4195 |
| 4195 | 4196 |
| 4196 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4197 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 4197 EnsureSpaceForLazyDeopt(); | 4198 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4199 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 4198 ASSERT(instr->HasEnvironment()); | 4200 ASSERT(instr->HasEnvironment()); |
| 4199 LEnvironment* env = instr->environment(); | 4201 LEnvironment* env = instr->environment(); |
| 4200 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 4202 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 4201 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 4203 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 4202 } | 4204 } |
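A subtlety of the refactor: the old EnsureSpaceForLazyDeopt both padded and updated last_lazy_deopt_pc_, while the new one only pads, and recording the pc is now done explicitly here after the padding. That split is what lets BeforeCall and CallCodeGeneric call the helper before a call without moving the recorded bailout pc, which must still refer to the previous bailout while the gap is measured. A toy model of the two-step protocol (names and offsets invented for the sketch):

    #include <cassert>

    // Toy model of the split: Pad() plays EnsureSpaceForLazyDeopt and
    // never records; Record() is the explicit update DoLazyBailout does.
    struct LazyDeoptTracker {
      int last_pc = 0;  // last_lazy_deopt_pc_
      int pc = 0;       // masm()->pc_offset()

      void Pad(int space_needed) {  // step 1: pad only
        if (pc < last_pc + space_needed) pc = last_pc + space_needed;
      }
      void Record() { last_pc = pc; }  // step 2: bailout sites only
    };

    int main() {
      LazyDeoptTracker t;
      t.last_pc = 35;
      t.pc = 40;
      t.Pad(13);    // DoLazyBailout: guarantee a full patch-size gap...
      t.Record();   // ...then record this bailout's pc
      assert(t.pc == 48 && t.last_pc == 48);
      return 0;
    }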
| 4203 | 4205 |
| 4204 | 4206 |
| 4205 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4207 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 4206 DeoptimizeIf(no_condition, instr->environment()); | 4208 DeoptimizeIf(no_condition, instr->environment()); |
| 4207 } | 4209 } |
| (...skipping 50 matching lines...) |
| 4258 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 4260 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
| 4259 virtual LInstruction* instr() { return instr_; } | 4261 virtual LInstruction* instr() { return instr_; } |
| 4260 private: | 4262 private: |
| 4261 LStackCheck* instr_; | 4263 LStackCheck* instr_; |
| 4262 }; | 4264 }; |
| 4263 | 4265 |
| 4264 DeferredStackCheck* deferred_stack_check = | 4266 DeferredStackCheck* deferred_stack_check = |
| 4265 new DeferredStackCheck(this, instr); | 4267 new DeferredStackCheck(this, instr); |
| 4266 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 4268 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 4267 __ j(below, deferred_stack_check->entry()); | 4269 __ j(below, deferred_stack_check->entry()); |
| 4268 EnsureSpaceForLazyDeopt(); | 4270 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4269 __ bind(instr->done_label()); | 4271 __ bind(instr->done_label()); |
| 4270 deferred_stack_check->SetExit(instr->done_label()); | 4272 deferred_stack_check->SetExit(instr->done_label()); |
| 4271 // There is no LLazyBailout instruction for stack-checks. We have to | 4273 // There is no LLazyBailout instruction for stack-checks. We have to |
| 4272 // prepare for lazy deoptimization explicitly here. | 4274 // prepare for lazy deoptimization explicitly here. |
| 4273 ASSERT(instr->HasEnvironment()); | 4275 ASSERT(instr->HasEnvironment()); |
| 4274 LEnvironment* env = instr->environment(); | 4276 LEnvironment* env = instr->environment(); |
| 4275 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 4277 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 4276 // Don't record a deoptimization index for the safepoint here. | 4278 // Don't record a deoptimization index for the safepoint here. |
| 4277 // This will be done explicitly when emitting the call and the safepoint in | 4279 // This will be done explicitly when emitting the call and the safepoint in |
| 4278 // the deferred code. | 4280 // the deferred code. |
| (...skipping 14 matching lines...) |
| 4293 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 4295 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 4294 ASSERT(osr_pc_offset_ == -1); | 4296 ASSERT(osr_pc_offset_ == -1); |
| 4295 osr_pc_offset_ = masm()->pc_offset(); | 4297 osr_pc_offset_ = masm()->pc_offset(); |
| 4296 } | 4298 } |
| 4297 | 4299 |
| 4298 #undef __ | 4300 #undef __ |
| 4299 | 4301 |
| 4300 } } // namespace v8::internal | 4302 } } // namespace v8::internal |
| 4301 | 4303 |
| 4302 #endif // V8_TARGET_ARCH_X64 | 4304 #endif // V8_TARGET_ARCH_X64 |