Index: src/ia32/lithium-ia32.cc
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 4207410cdf28c4dbc5c9c779da85436c7ac0a32f..ea4e5e790798a473eeebd4d4e1760503147b281d 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -44,10 +44,10 @@ LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
 #undef DEFINE_COMPILE
 
 LOsrEntry::LOsrEntry() {
-  for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
+  for (int i = 0; i < Register::NumAllocatableRegisters(); ++i) {
     register_spills_[i] = NULL;
   }
-  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
+  for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
     double_register_spills_[i] = NULL;
   }
 }
@@ -439,9 +439,11 @@ LPlatformChunk* LChunkBuilder::Build() {
   status_ = BUILDING;
 
   // Reserve the first spill slot for the state of dynamic alignment.
-  int alignment_state_index = chunk_->GetNextSpillIndex(false);
-  ASSERT_EQ(alignment_state_index, 0);
-  USE(alignment_state_index);
+  if (info()->IsOptimizing()) {
+    int alignment_state_index = chunk_->GetNextSpillIndex(false);
+    ASSERT_EQ(alignment_state_index, 0);
+    USE(alignment_state_index);
+  }
 
   const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
   for (int i = 0; i < blocks->length(); i++) {
@@ -473,6 +475,12 @@ LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
 }
 
 
+LUnallocated* LChunkBuilder::ToUnallocated(X87TopOfStackProxyRegister reg) {
+  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+      X87TopOfStackProxyRegister::ToAllocationIndex(reg));
+}
+
+
 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
   return Use(value, ToUnallocated(fixed_register));
 }
@@ -605,6 +613,13 @@ LInstruction* LChunkBuilder::DefineFixedDouble(
 }
 
 
+template<int I, int T>
+LInstruction* LChunkBuilder::DefineX87TOS(
+    LTemplateInstruction<1, I, T>* instr) {
+  return Define(instr, ToUnallocated(x87tos));
+}
+
+
 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
   HEnvironment* hydrogen_env = current_block_->last_environment();
   int argument_index_accumulator = 0;
@@ -617,6 +632,8 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                         HInstruction* hinstr,
                                         CanDeoptimize can_deoptimize) {
+  info()->MarkAsNonDeferredCalling();
+
 #ifdef DEBUG
   instr->VerifyCall();
 #endif
@@ -782,44 +799,46 @@ void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
   ASSERT(is_building());
   current_block_ = block;
   next_block_ = next_block;
-  if (block->IsStartBlock()) {
-    block->UpdateEnvironment(graph_->start_environment());
-    argument_count_ = 0;
-  } else if (block->predecessors()->length() == 1) {
-    // We have a single predecessor => copy environment and outgoing
-    // argument count from the predecessor.
-    ASSERT(block->phis()->length() == 0);
-    HBasicBlock* pred = block->predecessors()->at(0);
-    HEnvironment* last_environment = pred->last_environment();
-    ASSERT(last_environment != NULL);
-    // Only copy the environment, if it is later used again.
-    if (pred->end()->SecondSuccessor() == NULL) {
-      ASSERT(pred->end()->FirstSuccessor() == block);
+  if (graph()->info()->IsOptimizing()) {
+    if (block->IsStartBlock()) {
+      block->UpdateEnvironment(graph_->start_environment());
+      argument_count_ = 0;
+    } else if (block->predecessors()->length() == 1) {
+      // We have a single predecessor => copy environment and outgoing
+      // argument count from the predecessor.
+      ASSERT(block->phis()->length() == 0);
+      HBasicBlock* pred = block->predecessors()->at(0);
+      HEnvironment* last_environment = pred->last_environment();
+      ASSERT(last_environment != NULL);
+      // Only copy the environment, if it is later used again.
+      if (pred->end()->SecondSuccessor() == NULL) {
+        ASSERT(pred->end()->FirstSuccessor() == block);
+      } else {
+        if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
+            pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
+          last_environment = last_environment->Copy();
+        }
+      }
+      block->UpdateEnvironment(last_environment);
+      ASSERT(pred->argument_count() >= 0);
+      argument_count_ = pred->argument_count();
    } else {
-      if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
-          pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
-        last_environment = last_environment->Copy();
+      // We are at a state join => process phis.
+      HBasicBlock* pred = block->predecessors()->at(0);
+      // No need to copy the environment, it cannot be used later.
+      HEnvironment* last_environment = pred->last_environment();
+      for (int i = 0; i < block->phis()->length(); ++i) {
+        HPhi* phi = block->phis()->at(i);
+        last_environment->SetValueAt(phi->merged_index(), phi);
      }
+      for (int i = 0; i < block->deleted_phis()->length(); ++i) {
+        last_environment->SetValueAt(block->deleted_phis()->at(i),
+                                     graph_->GetConstantUndefined());
+      }
+      block->UpdateEnvironment(last_environment);
+      // Pick up the outgoing argument count of one of the predecessors.
+      argument_count_ = pred->argument_count();
    }
-    block->UpdateEnvironment(last_environment);
-    ASSERT(pred->argument_count() >= 0);
-    argument_count_ = pred->argument_count();
-  } else {
-    // We are at a state join => process phis.
-    HBasicBlock* pred = block->predecessors()->at(0);
-    // No need to copy the environment, it cannot be used later.
-    HEnvironment* last_environment = pred->last_environment();
-    for (int i = 0; i < block->phis()->length(); ++i) {
-      HPhi* phi = block->phis()->at(i);
-      last_environment->SetValueAt(phi->merged_index(), phi);
-    }
-    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
-      last_environment->SetValueAt(block->deleted_phis()->at(i),
-                                   graph_->GetConstantUndefined());
-    }
-    block->UpdateEnvironment(last_environment);
-    // Pick up the outgoing argument count of one of the predecessors.
-    argument_count_ = pred->argument_count();
  }
   HInstruction* current = block->first();
   int start = chunk_->instructions()->length();
@@ -1651,8 +1670,12 @@ LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
   Representation from = instr->from();
   Representation to = instr->to();
+  // Only mark conversions that might need to allocate as calling rather than
+  // all changes. This makes simple, non-allocating conversion not have to force
+  // building a stack frame.
   if (from.IsTagged()) {
     if (to.IsDouble()) {
+      info()->MarkAsDeferredCalling();
       LOperand* value = UseRegister(instr->value());
       // Temp register only necessary for minus zero check.
       LOperand* temp = instr->deoptimize_on_minus_zero()
@@ -1677,7 +1700,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
     }
   } else if (from.IsDouble()) {
     if (to.IsTagged()) {
-      LOperand* value = UseRegister(instr->value());
+      info()->MarkAsDeferredCalling();
+      LOperand* value = CpuFeatures::IsSupported(SSE2)
+          ? UseRegisterAtStart(instr->value())
+          : UseAtStart(instr->value());
       LOperand* temp = TempRegister();
 
       // Make sure that temp and result_temp are different registers.
@@ -1695,6 +1721,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
           DefineAsRegister(new(zone()) LDoubleToI(value, temp)));
     }
   } else if (from.IsInteger32()) {
+    info()->MarkAsDeferredCalling();
     if (to.IsTagged()) {
       HValue* val = instr->value();
       LOperand* value = UseRegister(val);
@@ -2216,8 +2243,27 @@ LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
 
 
 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
-  int spill_index = chunk()->GetParameterStackSlot(instr->index());
-  return DefineAsSpilled(new(zone()) LParameter, spill_index);
+  LParameter* result = new(zone()) LParameter;
+  switch (instr->kind()) {
+    case FUNCTION_PARAMETER: {
+      int spill_index = chunk()->GetParameterStackSlot(instr->index());
+      return DefineAsSpilled(result, spill_index);
+    }
+    case KEYED_LOAD_IC_PARAMETER: {
+      if (instr->index() == 0) return DefineFixed(result, edx);
+      if (instr->index() == 1) return DefineFixed(result, ecx);
+      UNREACHABLE();
+      break;
+    }
+    case KEYED_STORE_IC_PARAMETER:
+      if (instr->index() == 0) return DefineFixed(result, edx);
+      if (instr->index() == 1) return DefineFixed(result, ecx);
+      if (instr->index() == 2) return DefineFixed(result, eax);
+      break;
+    default:
+      UNREACHABLE();
+  }
+  return NULL;
 }
 
 
@@ -2318,6 +2364,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
 
 
 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
+  info()->MarkAsDeferredCalling();
   if (instr->is_function_entry()) {
     LOperand* context = UseFixed(instr->context(), esi);
     return MarkAsCall(new(zone()) LStackCheck(context), instr);