| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 31 matching lines...) |
| 42 class SafepointGenerator : public CallWrapper { | 42 class SafepointGenerator : public CallWrapper { |
| 43 public: | 43 public: |
| 44 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 45 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 46 Safepoint::DeoptMode mode) | 46 Safepoint::DeoptMode mode) |
| 47 : codegen_(codegen), | 47 : codegen_(codegen), |
| 48 pointers_(pointers), | 48 pointers_(pointers), |
| 49 deopt_mode_(mode) { } | 49 deopt_mode_(mode) { } |
| 50 virtual ~SafepointGenerator() { } | 50 virtual ~SafepointGenerator() { } |
| 51 | 51 |
| 52 virtual void BeforeCall(int call_size) const { } | 52 virtual void BeforeCall(int call_size) const { |
| 53 codegen_->EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - call_size); |
| 54 } |
| 53 | 55 |
| 54 virtual void AfterCall() const { | 56 virtual void AfterCall() const { |
| 55 codegen_->RecordSafepoint(pointers_, deopt_mode_); | 57 codegen_->RecordSafepoint(pointers_, deopt_mode_); |
| 56 } | 58 } |
| 57 | 59 |
| 58 private: | 60 private: |
| 59 LCodeGen* codegen_; | 61 LCodeGen* codegen_; |
| 60 LPointerMap* pointers_; | 62 LPointerMap* pointers_; |
| 61 Safepoint::DeoptMode deopt_mode_; | 63 Safepoint::DeoptMode deopt_mode_; |
| 62 }; | 64 }; |
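Note on the BeforeCall change above: the callback now reserves patch space before the call is emitted, and since the call instruction itself occupies call_size bytes of the region the deoptimizer will later overwrite, only patch_size() - call_size extra bytes of padding are requested. A minimal standalone sketch of that arithmetic follows (the constants and pc values are assumptions for illustration, not the real V8 values):

    #include <algorithm>
    #include <cstdio>

    int main() {
      const int kPatchSize = 13;     // assumed deoptimizer patch size in bytes
      const int kCallSize = 5;       // assumed size of the emitted call
      int last_lazy_deopt_pc = 100;  // pc recorded at the previous lazy-deopt point
      int current_pc = 105;          // pc just before the upcoming call

      // The call contributes kCallSize patchable bytes itself, so the
      // padding request is reduced accordingly.
      int space_needed = kPatchSize - kCallSize;
      int padding = std::max(0, last_lazy_deopt_pc + space_needed - current_pc);
      std::printf("emit %d bytes of nop padding\n", padding);  // prints 3
      return 0;
    }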
| (...skipping 171 matching lines...) |
| 234 if (instr->IsLabel()) { | 236 if (instr->IsLabel()) { |
| 235 LLabel* label = LLabel::cast(instr); | 237 LLabel* label = LLabel::cast(instr); |
| 236 emit_instructions = !label->HasReplacement(); | 238 emit_instructions = !label->HasReplacement(); |
| 237 } | 239 } |
| 238 | 240 |
| 239 if (emit_instructions) { | 241 if (emit_instructions) { |
| 240 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | 242 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); |
| 241 instr->CompileToNative(this); | 243 instr->CompileToNative(this); |
| 242 } | 244 } |
| 243 } | 245 } |
| 244 EnsureSpaceForLazyDeopt(); | 246 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 245 return !is_aborted(); | 247 return !is_aborted(); |
| 246 } | 248 } |
| 247 | 249 |
| 248 | 250 |
| 249 bool LCodeGen::GenerateJumpTable() { | 251 bool LCodeGen::GenerateJumpTable() { |
| 250 for (int i = 0; i < jump_table_.length(); i++) { | 252 for (int i = 0; i < jump_table_.length(); i++) { |
| 251 __ bind(&jump_table_[i].label); | 253 __ bind(&jump_table_[i].label); |
| 252 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); | 254 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY); |
| 253 } | 255 } |
| 254 return !is_aborted(); | 256 return !is_aborted(); |
| (...skipping 177 matching lines...) |
| 432 UNREACHABLE(); | 434 UNREACHABLE(); |
| 433 } | 435 } |
| 434 } | 436 } |
| 435 | 437 |
| 436 | 438 |
| 437 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 439 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 438 RelocInfo::Mode mode, | 440 RelocInfo::Mode mode, |
| 439 LInstruction* instr, | 441 LInstruction* instr, |
| 440 SafepointMode safepoint_mode, | 442 SafepointMode safepoint_mode, |
| 441 int argc) { | 443 int argc) { |
| 444 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code)); |
| 442 ASSERT(instr != NULL); | 445 ASSERT(instr != NULL); |
| 443 LPointerMap* pointers = instr->pointer_map(); | 446 LPointerMap* pointers = instr->pointer_map(); |
| 444 RecordPosition(pointers->position()); | 447 RecordPosition(pointers->position()); |
| 445 __ call(code, mode); | 448 __ call(code, mode); |
| 446 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); | 449 RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc); |
| 447 | 450 |
| 448 // Signal that we don't inline smi code before these stubs in the | 451 // Signal that we don't inline smi code before these stubs in the |
| 449 // optimizing code generator. | 452 // optimizing code generator. |
| 450 if (code->kind() == Code::BINARY_OP_IC || | 453 if (code->kind() == Code::BINARY_OP_IC || |
| 451 code->kind() == Code::COMPARE_IC) { | 454 code->kind() == Code::COMPARE_IC) { |
| (...skipping 3717 matching lines...) |
| 4169 __ j(not_equal, &check_frame_marker, Label::kNear); | 4172 __ j(not_equal, &check_frame_marker, Label::kNear); |
| 4170 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 4173 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
| 4171 | 4174 |
| 4172 // Check the marker in the calling frame. | 4175 // Check the marker in the calling frame. |
| 4173 __ bind(&check_frame_marker); | 4176 __ bind(&check_frame_marker); |
| 4174 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 4177 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 4175 Smi::FromInt(StackFrame::CONSTRUCT)); | 4178 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 4176 } | 4179 } |
| 4177 | 4180 |
| 4178 | 4181 |
| 4179 void LCodeGen::EnsureSpaceForLazyDeopt() { | 4182 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 4180 // Ensure that we have enough space after the previous lazy-bailout | 4183 // Ensure that we have enough space after the previous lazy-bailout |
| 4181 // instruction for patching the code here. | 4184 // instruction for patching the code here. |
| 4182 int current_pc = masm()->pc_offset(); | 4185 int current_pc = masm()->pc_offset(); |
| 4183 int patch_size = Deoptimizer::patch_size(); | 4186 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 4184 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 4187 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 4185 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | |
| 4186 while (padding_size > 0) { | 4188 while (padding_size > 0) { |
| 4187 int nop_size = padding_size > 9 ? 9 : padding_size; | 4189 int nop_size = padding_size > 9 ? 9 : padding_size; |
| 4188 __ nop(nop_size); | 4190 __ nop(nop_size); |
| 4189 padding_size -= nop_size; | 4191 padding_size -= nop_size; |
| 4190 } | 4192 } |
| 4191 } | 4193 } |
| 4192 last_lazy_deopt_pc_ = masm()->pc_offset(); | |
| 4193 } | 4194 } |
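The rewritten helper above no longer updates last_lazy_deopt_pc_; as the later hunks show, each call site now records the pc itself after ensuring space. A standalone toy model of the patched protocol, including the 9-byte nop chunking (the pc bookkeeping and constants are simplifications for illustration, not V8 internals):

    #include <algorithm>
    #include <cstdio>

    struct ToyCodeGen {
      int pc = 0;                  // bytes emitted so far
      int last_lazy_deopt_pc = 0;  // pc of the previous lazy-deopt point

      void Nop(int n) { pc += n; }  // stand-in for __ nop(n)

      void EnsureSpaceForLazyDeopt(int space_needed) {
        int padding = last_lazy_deopt_pc + space_needed - pc;
        while (padding > 0) {
          int n = std::min(9, padding);  // x64 nops encode at most 9 bytes
          Nop(n);
          padding -= n;
        }
        // Deliberately does not update last_lazy_deopt_pc; callers do that.
      }
    };

    int main() {
      ToyCodeGen gen;
      gen.EnsureSpaceForLazyDeopt(13);  // emits nop(9) then nop(4)
      gen.last_lazy_deopt_pc = gen.pc;  // caller records the pc, as in the diff
      std::printf("pc after padding = %d\n", gen.pc);  // prints 13
      return 0;
    }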
| 4194 | 4195 |
| 4195 | 4196 |
| 4196 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 4197 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 4197 EnsureSpaceForLazyDeopt(); | 4198 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4199 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 4198 ASSERT(instr->HasEnvironment()); | 4200 ASSERT(instr->HasEnvironment()); |
| 4199 LEnvironment* env = instr->environment(); | 4201 LEnvironment* env = instr->environment(); |
| 4200 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 4202 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 4201 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 4203 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 4202 } | 4204 } |
| 4203 | 4205 |
| 4204 | 4206 |
| 4205 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 4207 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 4206 DeoptimizeIf(no_condition, instr->environment()); | 4208 DeoptimizeIf(no_condition, instr->environment()); |
| 4207 } | 4209 } |
| (...skipping 57 matching lines...) |
| 4265 LEnvironment* env = instr->environment(); | 4267 LEnvironment* env = instr->environment(); |
| 4266 // There is no LLazyBailout instruction for stack-checks. We have to | 4268 // There is no LLazyBailout instruction for stack-checks. We have to |
| 4267 // prepare for lazy deoptimization explicitly here. | 4269 // prepare for lazy deoptimization explicitly here. |
| 4268 if (instr->hydrogen()->is_function_entry()) { | 4270 if (instr->hydrogen()->is_function_entry()) { |
| 4269 // Perform stack overflow check. | 4271 // Perform stack overflow check. |
| 4270 Label done; | 4272 Label done; |
| 4271 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 4273 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 4272 __ j(above_equal, &done, Label::kNear); | 4274 __ j(above_equal, &done, Label::kNear); |
| 4273 StackCheckStub stub; | 4275 StackCheckStub stub; |
| 4274 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4276 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4275 EnsureSpaceForLazyDeopt(); | 4277 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4278 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 4276 __ bind(&done); | 4279 __ bind(&done); |
| 4277 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 4280 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 4278 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 4281 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 4279 } else { | 4282 } else { |
| 4280 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4283 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 4281 // Perform stack overflow check if this goto needs it before jumping. | 4284 // Perform stack overflow check if this goto needs it before jumping. |
| 4282 DeferredStackCheck* deferred_stack_check = | 4285 DeferredStackCheck* deferred_stack_check = |
| 4283 new DeferredStackCheck(this, instr); | 4286 new DeferredStackCheck(this, instr); |
| 4284 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 4287 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 4285 __ j(below, deferred_stack_check->entry()); | 4288 __ j(below, deferred_stack_check->entry()); |
| 4286 EnsureSpaceForLazyDeopt(); | 4289 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4290 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 4287 __ bind(instr->done_label()); | 4291 __ bind(instr->done_label()); |
| 4288 deferred_stack_check->SetExit(instr->done_label()); | 4292 deferred_stack_check->SetExit(instr->done_label()); |
| 4289 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 4293 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 4290 // Don't record a deoptimization index for the safepoint here. | 4294 // Don't record a deoptimization index for the safepoint here. |
| 4291 // This will be done explicitly when emitting call and the safepoint in | 4295 // This will be done explicitly when emitting call and the safepoint in |
| 4292 // the deferred code. | 4296 // the deferred code. |
| 4293 } | 4297 } |
| 4294 } | 4298 } |
| 4295 | 4299 |
| 4296 | 4300 |
| (...skipping 11 matching lines...) |
| 4308 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 4312 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 4309 ASSERT(osr_pc_offset_ == -1); | 4313 ASSERT(osr_pc_offset_ == -1); |
| 4310 osr_pc_offset_ = masm()->pc_offset(); | 4314 osr_pc_offset_ = masm()->pc_offset(); |
| 4311 } | 4315 } |
| 4312 | 4316 |
| 4313 #undef __ | 4317 #undef __ |
| 4314 | 4318 |
| 4315 } } // namespace v8::internal | 4319 } } // namespace v8::internal |
| 4316 | 4320 |
| 4317 #endif // V8_TARGET_ARCH_X64 | 4321 #endif // V8_TARGET_ARCH_X64 |