| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 102 matching lines...) |
| 113 info()->CommitDependencies(code); | 113 info()->CommitDependencies(code); |
| 114 } | 114 } |
| 115 | 115 |
| 116 | 116 |
| 117 void LCodeGen::Abort(BailoutReason reason) { | 117 void LCodeGen::Abort(BailoutReason reason) { |
| 118 info()->set_bailout_reason(reason); | 118 info()->set_bailout_reason(reason); |
| 119 status_ = ABORTED; | 119 status_ = ABORTED; |
| 120 } | 120 } |
| 121 | 121 |
| 122 | 122 |
| 123 void LCodeGen::Comment(const char* format, ...) { | |
| 124 if (!FLAG_code_comments) return; | |
| 125 char buffer[4 * KB]; | |
| 126 StringBuilder builder(buffer, ARRAY_SIZE(buffer)); | |
| 127 va_list arguments; | |
| 128 va_start(arguments, format); | |
| 129 builder.AddFormattedList(format, arguments); | |
| 130 va_end(arguments); | |
| 131 | |
| 132 // Copy the string before recording it in the assembler to avoid | |
| 133 // issues when the stack allocated buffer goes out of scope. | |
| 134 size_t length = builder.position(); | |
| 135 Vector<char> copy = Vector<char>::New(length + 1); | |
| 136 OS::MemCopy(copy.start(), builder.Finalize(), copy.length()); | |
| 137 masm()->RecordComment(copy.start()); | |
| 138 } | |
| 139 | |
| 140 | |
| 141 #ifdef _MSC_VER | 123 #ifdef _MSC_VER |
| 142 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 124 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
| 143 const int kPageSize = 4 * KB; | 125 const int kPageSize = 4 * KB; |
| 144 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 126 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
| 145 __ mov(Operand(esp, offset), eax); | 127 __ mov(Operand(esp, offset), eax); |
| 146 } | 128 } |
| 147 } | 129 } |
| 148 #endif | 130 #endif |
| 149 | 131 |
| 150 | 132 |
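Review note: MakeSureStackPagesMapped (unchanged by this CL, only renumbered) exists because Windows commits stack memory lazily. A guard page sits just below the committed region, and a write landing more than one page beyond it faults instead of growing the stack, so a frame larger than 4 KB must touch each of its pages in descending address order before use. A standalone model of the same idea (illustrative only; the names are ours, not V8's):

```cpp
// Touch one byte per 4 KB page, highest page first, so each write lands at
// most one page below the previously committed region -- mirroring the
// `mov [esp+offset], eax` loop above, which walks offset down from the top.
void ProbeStackPages(volatile char* new_sp, int frame_size) {
  const int kPageSize = 4 * 1024;
  for (int offset = frame_size - kPageSize; offset > 0; offset -= kPageSize) {
    new_sp[offset] = 0;  // offset shrinks => address descends one page per write
  }
}
```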
| (...skipping 226 matching lines...) |
| 377 __ mov(alignment_loc, edx); | 359 __ mov(alignment_loc, edx); |
| 378 | 360 |
| 379 // Adjust the frame size, subsuming the unoptimized frame into the | 361 // Adjust the frame size, subsuming the unoptimized frame into the |
| 380 // optimized frame. | 362 // optimized frame. |
| 381 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 363 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
| 382 ASSERT(slots >= 1); | 364 ASSERT(slots >= 1); |
| 383 __ sub(esp, Immediate((slots - 1) * kPointerSize)); | 365 __ sub(esp, Immediate((slots - 1) * kPointerSize)); |
| 384 } | 366 } |
| 385 | 367 |
| 386 | 368 |
| 387 bool LCodeGen::GenerateBody() { | 369 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| 388 ASSERT(is_generating()); | 370 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); |
| 389 bool emit_instructions = true; | |
| 390 for (current_instruction_ = 0; | |
| 391 !is_aborted() && current_instruction_ < instructions_->length(); | |
| 392 current_instruction_++) { | |
| 393 LInstruction* instr = instructions_->at(current_instruction_); | |
| 394 | |
| 395 // Don't emit code for basic blocks with a replacement. | |
| 396 if (instr->IsLabel()) { | |
| 397 emit_instructions = !LLabel::cast(instr)->HasReplacement(); | |
| 398 } | |
| 399 if (!emit_instructions) continue; | |
| 400 | |
| 401 if (FLAG_code_comments && instr->HasInterestingComment(this)) { | |
| 402 Comment(";;; <@%d,#%d> %s", | |
| 403 current_instruction_, | |
| 404 instr->hydrogen_value()->id(), | |
| 405 instr->Mnemonic()); | |
| 406 } | |
| 407 | |
| 408 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); | |
| 409 | |
| 410 RecordAndUpdatePosition(instr->position()); | |
| 411 | |
| 412 instr->CompileToNative(this); | |
| 413 | |
| 414 if (!CpuFeatures::IsSupported(SSE2)) { | |
| 415 if (instr->IsGoto()) { | |
| 416 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); | |
| 417 } else if (FLAG_debug_code && FLAG_enable_slow_asserts && | |
| 418 !instr->IsGap() && !instr->IsReturn()) { | |
| 419 if (instr->ClobbersDoubleRegisters()) { | |
| 420 if (instr->HasDoubleRegisterResult()) { | |
| 421 ASSERT_EQ(1, x87_stack_.depth()); | |
| 422 } else { | |
| 423 ASSERT_EQ(0, x87_stack_.depth()); | |
| 424 } | |
| 425 } | |
| 426 __ VerifyX87StackDepth(x87_stack_.depth()); | |
| 427 } | |
| 428 } | |
| 429 } | |
| 430 EnsureSpaceForLazyDeopt(); | |
| 431 return !is_aborted(); | |
| 432 } | 371 } |
| 433 | 372 |
| 434 | 373 |
| 374 void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { |
| 375 if (!CpuFeatures::IsSupported(SSE2)) { |
| 376 if (instr->IsGoto()) { |
| 377 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); |
| 378 } else if (FLAG_debug_code && FLAG_enable_slow_asserts && |
| 379 !instr->IsGap() && !instr->IsReturn()) { |
| 380 if (instr->ClobbersDoubleRegisters()) { |
| 381 if (instr->HasDoubleRegisterResult()) { |
| 382 ASSERT_EQ(1, x87_stack_.depth()); |
| 383 } else { |
| 384 ASSERT_EQ(0, x87_stack_.depth()); |
| 385 } |
| 386 } |
| 387 __ VerifyX87StackDepth(x87_stack_.depth()); |
| 388 } |
| 389 } |
| 390 } |
| 391 |
| 392 |
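Review note: this hunk is the heart of the CL. The per-instruction loop disappears from the ia32 backend, leaving two hooks: GenerateBodyInstructionPre (flush the x87 stack when SSE2 is unavailable) and GenerateBodyInstructionPost (x87 bookkeeping and slow asserts). A sketch of the driver we assume now lives in shared, architecture-independent code, reassembled from the removed ia32 body (the LCodeGenBase name and exact hook placement are assumptions, not verified against the shared file):

```cpp
bool LCodeGenBase::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);

    // Don't emit code for basic blocks with a replacement.
    if (instr->IsLabel()) {
      emit_instructions = !LLabel::cast(instr)->HasReplacement();
    }
    if (!emit_instructions) continue;

    if (FLAG_code_comments && instr->HasInterestingComment(this)) {
      Comment(";;; <@%d,#%d> %s", current_instruction_,
              instr->hydrogen_value()->id(), instr->Mnemonic());
    }

    GenerateBodyInstructionPre(instr);   // ia32: FlushX87StackIfNecessary
    RecordAndUpdatePosition(instr->position());
    instr->CompileToNative(this);
    GenerateBodyInstructionPost(instr);  // ia32: x87 depth asserts, LeavingBlock
  }
  // Assumption: the lazy-deopt padding stays at the end of the loop, as in
  // the removed ia32 version (which called the old parameterless overload).
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  return !is_aborted();
}
```

The removed LCodeGen::Comment earlier in this diff is the same story: nothing in its varargs plumbing is ia32-specific, so it presumably moves to the same shared base.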
| 435 bool LCodeGen::GenerateJumpTable() { | 393 bool LCodeGen::GenerateJumpTable() { |
| 436 Label needs_frame; | 394 Label needs_frame; |
| 437 if (jump_table_.length() > 0) { | 395 if (jump_table_.length() > 0) { |
| 438 Comment(";;; -------------------- Jump table --------------------"); | 396 Comment(";;; -------------------- Jump table --------------------"); |
| 439 } | 397 } |
| 440 for (int i = 0; i < jump_table_.length(); i++) { | 398 for (int i = 0; i < jump_table_.length(); i++) { |
| 441 __ bind(&jump_table_[i].label); | 399 __ bind(&jump_table_[i].label); |
| 442 Address entry = jump_table_[i].address; | 400 Address entry = jump_table_[i].address; |
| 443 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 401 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
| 444 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 402 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| (...skipping 1889 matching lines...) |
| 2334 ASSERT(ToRegister(instr->left()).is(edx)); | 2292 ASSERT(ToRegister(instr->left()).is(edx)); |
| 2335 ASSERT(ToRegister(instr->right()).is(eax)); | 2293 ASSERT(ToRegister(instr->right()).is(eax)); |
| 2336 ASSERT(ToRegister(instr->result()).is(eax)); | 2294 ASSERT(ToRegister(instr->result()).is(eax)); |
| 2337 | 2295 |
| 2338 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 2296 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 2339 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2297 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2340 __ nop(); // Signals no inlined code. | 2298 __ nop(); // Signals no inlined code. |
| 2341 } | 2299 } |
| 2342 | 2300 |
| 2343 | 2301 |
| 2344 int LCodeGen::GetNextEmittedBlock() const { | |
| 2345 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { | |
| 2346 if (!chunk_->GetLabel(i)->HasReplacement()) return i; | |
| 2347 } | |
| 2348 return -1; | |
| 2349 } | |
| 2350 | |
| 2351 | |
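Review note: GetNextEmittedBlock contains nothing ia32-specific (it only walks the chunk's labels past replaced blocks), so it is presumably hoisted into the shared base alongside GenerateBody. EmitBranch below depends on it to elide jumps into the fall-through block; its body is truncated in this diff, but the pattern it implements conventionally looks like this (a sketch under that assumption, not the elided lines themselves):

```cpp
if (right_block == left_block || cc == no_condition) {
  EmitGoto(left_block);                    // degenerate: both edges agree
} else if (left_block == next_block) {
  // True target is the fall-through block: branch only on the false edge.
  __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
} else if (right_block == next_block) {
  // False target is the fall-through block: branch only on the true edge.
  __ j(cc, chunk_->GetAssemblyLabel(left_block));
} else {
  // Neither target falls through: branch to one, jump to the other.
  __ j(cc, chunk_->GetAssemblyLabel(left_block));
  __ jmp(chunk_->GetAssemblyLabel(right_block));
}
```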
| 2352 template<class InstrType> | 2302 template<class InstrType> |
| 2353 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { | 2303 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { |
| 2354 int left_block = instr->TrueDestination(chunk_); | 2304 int left_block = instr->TrueDestination(chunk_); |
| 2355 int right_block = instr->FalseDestination(chunk_); | 2305 int right_block = instr->FalseDestination(chunk_); |
| 2356 | 2306 |
| 2357 int next_block = GetNextEmittedBlock(); | 2307 int next_block = GetNextEmittedBlock(); |
| 2358 | 2308 |
| 2359 if (right_block == left_block || cc == no_condition) { | 2309 if (right_block == left_block || cc == no_condition) { |
| 2360 EmitGoto(left_block); | 2310 EmitGoto(left_block); |
| 2361 } else if (left_block == next_block) { | 2311 } else if (left_block == next_block) { |
| (...skipping 3824 matching lines...) |
| 6186 __ j(not_equal, &check_frame_marker, Label::kNear); | 6136 __ j(not_equal, &check_frame_marker, Label::kNear); |
| 6187 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 6137 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); |
| 6188 | 6138 |
| 6189 // Check the marker in the calling frame. | 6139 // Check the marker in the calling frame. |
| 6190 __ bind(&check_frame_marker); | 6140 __ bind(&check_frame_marker); |
| 6191 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 6141 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 6192 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); | 6142 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); |
| 6193 } | 6143 } |
| 6194 | 6144 |
| 6195 | 6145 |
| 6196 void LCodeGen::EnsureSpaceForLazyDeopt() { | 6146 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 6197 if (!info()->IsStub()) { | 6147 if (!info()->IsStub()) { |
| 6198 // Ensure that we have enough space after the previous lazy-bailout | 6148 // Ensure that we have enough space after the previous lazy-bailout |
| 6199 // instruction for patching the code here. | 6149 // instruction for patching the code here. |
| 6200 int current_pc = masm()->pc_offset(); | 6150 int current_pc = masm()->pc_offset(); |
| 6201 int patch_size = Deoptimizer::patch_size(); | 6151 if (current_pc < last_lazy_deopt_pc_ + space_needed) { |
| 6202 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 6152 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
| 6203 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | |
| 6204 __ Nop(padding_size); | 6153 __ Nop(padding_size); |
| 6205 } | 6154 } |
| 6206 } | 6155 } |
| 6207 last_lazy_deopt_pc_ = masm()->pc_offset(); | 6156 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 6208 } | 6157 } |
| 6209 | 6158 |
| 6210 | 6159 |
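Review note: EnsureSpaceForLazyDeopt now takes the required byte count instead of reading Deoptimizer::patch_size() itself. Every ia32 call site below passes patch_size(), so the change is behavior-preserving here; it simply lets shared code or other ports request a different amount of padding. The invariant it maintains, as a self-contained model (the concrete numbers are ours, for illustration):

```cpp
#include <algorithm>
#include <cassert>

// Lazy deoptimization later overwrites the bytes after a patchable point,
// so two consecutive points must sit at least `space_needed` bytes apart;
// this returns the nop padding required to restore that invariant.
int PaddingFor(int last_lazy_deopt_pc, int current_pc, int space_needed) {
  return std::max(0, last_lazy_deopt_pc + space_needed - current_pc);
}

int main() {
  assert(PaddingFor(100, 102, 5) == 3);  // too close: emit 3 bytes of nops
  assert(PaddingFor(100, 110, 5) == 0);  // already far enough: no padding
  return 0;
}
```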
| 6211 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 6160 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 6212 EnsureSpaceForLazyDeopt(); | 6161 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 6213 ASSERT(instr->HasEnvironment()); | 6162 ASSERT(instr->HasEnvironment()); |
| 6214 LEnvironment* env = instr->environment(); | 6163 LEnvironment* env = instr->environment(); |
| 6215 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6164 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 6216 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6165 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 6217 } | 6166 } |
| 6218 | 6167 |
| 6219 | 6168 |
| 6220 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 6169 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 6221 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 6170 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 6222 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 6171 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| (...skipping 50 matching lines...) |
| 6273 ExternalReference stack_limit = | 6222 ExternalReference stack_limit = |
| 6274 ExternalReference::address_of_stack_limit(isolate()); | 6223 ExternalReference::address_of_stack_limit(isolate()); |
| 6275 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 6224 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 6276 __ j(above_equal, &done, Label::kNear); | 6225 __ j(above_equal, &done, Label::kNear); |
| 6277 | 6226 |
| 6278 ASSERT(instr->context()->IsRegister()); | 6227 ASSERT(instr->context()->IsRegister()); |
| 6279 ASSERT(ToRegister(instr->context()).is(esi)); | 6228 ASSERT(ToRegister(instr->context()).is(esi)); |
| 6280 CallCode(isolate()->builtins()->StackCheck(), | 6229 CallCode(isolate()->builtins()->StackCheck(), |
| 6281 RelocInfo::CODE_TARGET, | 6230 RelocInfo::CODE_TARGET, |
| 6282 instr); | 6231 instr); |
| 6283 EnsureSpaceForLazyDeopt(); | 6232 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 6284 __ bind(&done); | 6233 __ bind(&done); |
| 6285 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6234 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 6286 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 6235 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 6287 } else { | 6236 } else { |
| 6288 ASSERT(instr->hydrogen()->is_backwards_branch()); | 6237 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 6289 // Perform stack overflow check if this goto needs it before jumping. | 6238 // Perform stack overflow check if this goto needs it before jumping. |
| 6290 DeferredStackCheck* deferred_stack_check = | 6239 DeferredStackCheck* deferred_stack_check = |
| 6291 new(zone()) DeferredStackCheck(this, instr, x87_stack_); | 6240 new(zone()) DeferredStackCheck(this, instr, x87_stack_); |
| 6292 ExternalReference stack_limit = | 6241 ExternalReference stack_limit = |
| 6293 ExternalReference::address_of_stack_limit(isolate()); | 6242 ExternalReference::address_of_stack_limit(isolate()); |
| 6294 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 6243 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 6295 __ j(below, deferred_stack_check->entry()); | 6244 __ j(below, deferred_stack_check->entry()); |
| 6296 EnsureSpaceForLazyDeopt(); | 6245 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 6297 __ bind(instr->done_label()); | 6246 __ bind(instr->done_label()); |
| 6298 deferred_stack_check->SetExit(instr->done_label()); | 6247 deferred_stack_check->SetExit(instr->done_label()); |
| 6299 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 6248 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 6300 // Don't record a deoptimization index for the safepoint here. | 6249 // Don't record a deoptimization index for the safepoint here. |
| 6301 // This will be done explicitly when emitting call and the safepoint in | 6250 // This will be done explicitly when emitting call and the safepoint in |
| 6302 // the deferred code. | 6251 // the deferred code. |
| 6303 } | 6252 } |
| 6304 } | 6253 } |
| 6305 | 6254 |
| 6306 | 6255 |
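Review note: both branches of the stack check above reserve lazy-deopt space with the explicit patch_size() argument. The backward-branch path routes its slow path through DeferredStackCheck, an instance of V8's deferred-code pattern: the runtime call is emitted out of line after the body, with the x87 stack state captured at the jump. Its assumed shape, matching the three-argument construction above (a sketch; the real class is declared elsewhere in this file):

```cpp
class DeferredStackCheck : public LDeferredCode {
 public:
  DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr,
                     const LCodeGen::X87Stack& x87_stack)
      : LDeferredCode(codegen, x87_stack), instr_(instr) {}
  // Emitted out of line: call the runtime, then jump back to done_label().
  virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
  virtual LInstruction* instr() { return instr_; }
 private:
  LStackCheck* instr_;
};
```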
| (...skipping 97 matching lines...) |
| 6404 FixedArray::kHeaderSize - kPointerSize)); | 6353 FixedArray::kHeaderSize - kPointerSize)); |
| 6405 __ bind(&done); | 6354 __ bind(&done); |
| 6406 } | 6355 } |
| 6407 | 6356 |
| 6408 | 6357 |
| 6409 #undef __ | 6358 #undef __ |
| 6410 | 6359 |
| 6411 } } // namespace v8::internal | 6360 } } // namespace v8::internal |
| 6412 | 6361 |
| 6413 #endif // V8_TARGET_ARCH_IA32 | 6362 #endif // V8_TARGET_ARCH_IA32 |