Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 716c65718f2c3afb76165184e758bf5e21d6d301..14d16c018e0f8ceac3cf0ddaa1646bb06f4e48db 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -120,24 +120,6 @@ void LCodeGen::Abort(BailoutReason reason) {
 }
 
 
-void LCodeGen::Comment(const char* format, ...) {
-  if (!FLAG_code_comments) return;
-  char buffer[4 * KB];
-  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
-  va_list arguments;
-  va_start(arguments, format);
-  builder.AddFormattedList(format, arguments);
-  va_end(arguments);
-
-  // Copy the string before recording it in the assembler to avoid
-  // issues when the stack allocated buffer goes out of scope.
-  size_t length = builder.position();
-  Vector<char> copy = Vector<char>::New(length + 1);
-  OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
-  masm()->RecordComment(copy.start());
-}
-
-
 #ifdef _MSC_VER
 void LCodeGen::MakeSureStackPagesMapped(int offset) {
   const int kPageSize = 4 * KB;
@@ -384,51 +366,27 @@ void LCodeGen::GenerateOsrPrologue() {
 }
 
 
-bool LCodeGen::GenerateBody() {
-  ASSERT(is_generating());
-  bool emit_instructions = true;
-  for (current_instruction_ = 0;
-       !is_aborted() && current_instruction_ < instructions_->length();
-       current_instruction_++) {
-    LInstruction* instr = instructions_->at(current_instruction_);
-
-    // Don't emit code for basic blocks with a replacement.
-    if (instr->IsLabel()) {
-      emit_instructions = !LLabel::cast(instr)->HasReplacement();
-    }
-    if (!emit_instructions) continue;
-
-    if (FLAG_code_comments && instr->HasInterestingComment(this)) {
-      Comment(";;; <@%d,#%d> %s",
-              current_instruction_,
-              instr->hydrogen_value()->id(),
-              instr->Mnemonic());
-    }
-
-    if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
-
-    RecordAndUpdatePosition(instr->position());
+void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
+}
 
-    instr->CompileToNative(this);
 
-    if (!CpuFeatures::IsSupported(SSE2)) {
-      if (instr->IsGoto()) {
-        x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr));
-      } else if (FLAG_debug_code && FLAG_enable_slow_asserts &&
-                 !instr->IsGap() && !instr->IsReturn()) {
-        if (instr->ClobbersDoubleRegisters()) {
-          if (instr->HasDoubleRegisterResult()) {
-            ASSERT_EQ(1, x87_stack_.depth());
-          } else {
-            ASSERT_EQ(0, x87_stack_.depth());
-          }
+void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) {
+  if (!CpuFeatures::IsSupported(SSE2)) {
+    if (instr->IsGoto()) {
+      x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr));
+    } else if (FLAG_debug_code && FLAG_enable_slow_asserts &&
+               !instr->IsGap() && !instr->IsReturn()) {
+      if (instr->ClobbersDoubleRegisters()) {
+        if (instr->HasDoubleRegisterResult()) {
+          ASSERT_EQ(1, x87_stack_.depth());
+        } else {
+          ASSERT_EQ(0, x87_stack_.depth());
         }
-        __ VerifyX87StackDepth(x87_stack_.depth());
       }
+      __ VerifyX87StackDepth(x87_stack_.depth());
     }
   }
-  EnsureSpaceForLazyDeopt();
-  return !is_aborted();
 }
 
 
@@ -2342,14 +2300,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
 }
 
 
-int LCodeGen::GetNextEmittedBlock() const {
-  for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
-    if (!chunk_->GetLabel(i)->HasReplacement()) return i;
-  }
-  return -1;
-}
-
-
 template<class InstrType>
 void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
   int left_block = instr->TrueDestination(chunk_);
@@ -6194,14 +6144,13 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
 }
 
 
-void LCodeGen::EnsureSpaceForLazyDeopt() {
+void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
   if (!info()->IsStub()) {
     // Ensure that we have enough space after the previous lazy-bailout
     // instruction for patching the code here.
     int current_pc = masm()->pc_offset();
-    int patch_size = Deoptimizer::patch_size();
-    if (current_pc < last_lazy_deopt_pc_ + patch_size) {
-      int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
       __ Nop(padding_size);
     }
   }
@@ -6210,7 +6159,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt() {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt();
+  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -6281,7 +6230,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
              RelocInfo::CODE_TARGET,
              instr);
-    EnsureSpaceForLazyDeopt();
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
     safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
@@ -6294,7 +6243,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
         ExternalReference::address_of_stack_limit(isolate());
     __ cmp(esp, Operand::StaticVariable(stack_limit));
     __ j(below, deferred_stack_check->entry());
-    EnsureSpaceForLazyDeopt();
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
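
Note: the loop deleted from GenerateBody() above presumably moves into a shared
lithium-codegen base class as part of this refactoring; this patch only shows the
ia32 side, so the base-class name and the exact hook placement below are
assumptions, not something the patch confirms. A minimal sketch of what that
shared driver loop would look like, reconstructed from the deleted ia32 code,
with the platform-specific work routed through the two new hooks:

bool LCodeGenBase::GenerateBody() {  // hypothetical shared base class
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);

    // Don't emit code for basic blocks with a replacement.
    if (instr->IsLabel()) {
      emit_instructions = !LLabel::cast(instr)->HasReplacement();
    }
    if (!emit_instructions) continue;

    if (FLAG_code_comments && instr->HasInterestingComment(this)) {
      Comment(";;; <@%d,#%d> %s",
              current_instruction_,
              instr->hydrogen_value()->id(),
              instr->Mnemonic());
    }

    // Platform-specific pre/post work; on ia32 these are the x87 stack
    // bookkeeping hooks added by this patch.
    GenerateBodyInstructionPre(instr);

    RecordAndUpdatePosition(instr->position());
    instr->CompileToNative(this);  // may need a cast to the platform LCodeGen

    GenerateBodyInstructionPost(instr);
  }
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  return !is_aborted();
}

The removal of LCodeGen::Comment() and GetNextEmittedBlock() in the earlier
hunks is consistent with the same move: both are platform-independent and can
live in shared code. EnsureSpaceForLazyDeopt() now takes the space it must
reserve as a parameter instead of reading Deoptimizer::patch_size() itself,
which is why every call site in this patch passes Deoptimizer::patch_size()
explicitly; a caller needing a different amount of patch room can now ask for it.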