| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 #define DEFINE_COMPILE(type) \ | 39 #define DEFINE_COMPILE(type) \ |
| 40 void L##type::CompileToNative(LCodeGen* generator) { \ | 40 void L##type::CompileToNative(LCodeGen* generator) { \ |
| 41 generator->Do##type(this); \ | 41 generator->Do##type(this); \ |
| 42 } | 42 } |
| 43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 43 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) |
| 44 #undef DEFINE_COMPILE | 44 #undef DEFINE_COMPILE |
| 45 | 45 |
| 46 LOsrEntry::LOsrEntry() { | 46 LOsrEntry::LOsrEntry() { |
| 47 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) { | 47 for (int i = 0; i < Register::NumAllocatableRegisters(); ++i) { |
| 48 register_spills_[i] = NULL; | 48 register_spills_[i] = NULL; |
| 49 } | 49 } |
| 50 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) { | 50 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { |
| 51 double_register_spills_[i] = NULL; | 51 double_register_spills_[i] = NULL; |
| 52 } | 52 } |
| 53 } | 53 } |
| 54 | 54 |
| 55 | 55 |
| 56 void LOsrEntry::MarkSpilledRegister(int allocation_index, | 56 void LOsrEntry::MarkSpilledRegister(int allocation_index, |
| 57 LOperand* spill_operand) { | 57 LOperand* spill_operand) { |
| 58 ASSERT(spill_operand->IsStackSlot()); | 58 ASSERT(spill_operand->IsStackSlot()); |
| 59 ASSERT(register_spills_[allocation_index] == NULL); | 59 ASSERT(register_spills_[allocation_index] == NULL); |
| 60 register_spills_[allocation_index] = spill_operand; | 60 register_spills_[allocation_index] = spill_operand; |
| (...skipping 530 matching lines...) |
| 591 int argument_index_accumulator = 0; | 591 int argument_index_accumulator = 0; |
| 592 instr->set_environment(CreateEnvironment(hydrogen_env, | 592 instr->set_environment(CreateEnvironment(hydrogen_env, |
| 593 &argument_index_accumulator)); | 593 &argument_index_accumulator)); |
| 594 return instr; | 594 return instr; |
| 595 } | 595 } |
| 596 | 596 |
| 597 | 597 |
| 598 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, | 598 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, |
| 599 HInstruction* hinstr, | 599 HInstruction* hinstr, |
| 600 CanDeoptimize can_deoptimize) { | 600 CanDeoptimize can_deoptimize) { |
| 601 info()->MarkAsNonDeferredCalling(); |
| 602 |
| 601 #ifdef DEBUG | 603 #ifdef DEBUG |
| 602 instr->VerifyCall(); | 604 instr->VerifyCall(); |
| 603 #endif | 605 #endif |
| 604 instr->MarkAsCall(); | 606 instr->MarkAsCall(); |
| 605 instr = AssignPointerMap(instr); | 607 instr = AssignPointerMap(instr); |
| 606 | 608 |
| 607 if (hinstr->HasObservableSideEffects()) { | 609 if (hinstr->HasObservableSideEffects()) { |
| 608 ASSERT(hinstr->next()->IsSimulate()); | 610 ASSERT(hinstr->next()->IsSimulate()); |
| 609 HSimulate* sim = HSimulate::cast(hinstr->next()); | 611 HSimulate* sim = HSimulate::cast(hinstr->next()); |
| 610 ASSERT(instruction_pending_deoptimization_environment_ == NULL); | 612 ASSERT(instruction_pending_deoptimization_environment_ == NULL); |
| (...skipping 970 matching lines...) |
| 1581 // All HForceRepresentation instructions should be eliminated in the | 1583 // All HForceRepresentation instructions should be eliminated in the |
| 1582 // representation change phase of Hydrogen. | 1584 // representation change phase of Hydrogen. |
| 1583 UNREACHABLE(); | 1585 UNREACHABLE(); |
| 1584 return NULL; | 1586 return NULL; |
| 1585 } | 1587 } |
| 1586 | 1588 |
| 1587 | 1589 |
| 1588 LInstruction* LChunkBuilder::DoChange(HChange* instr) { | 1590 LInstruction* LChunkBuilder::DoChange(HChange* instr) { |
| 1589 Representation from = instr->from(); | 1591 Representation from = instr->from(); |
| 1590 Representation to = instr->to(); | 1592 Representation to = instr->to(); |
| 1593 // Only mark conversions that might need to allocate as calling, rather |
| 1594 // than all changes, so that simple, non-allocating conversions do not |
| 1595 // force building a stack frame. |
| 1591 if (from.IsTagged()) { | 1596 if (from.IsTagged()) { |
| 1592 if (to.IsDouble()) { | 1597 if (to.IsDouble()) { |
| 1598 info()->MarkAsDeferredCalling(); |
| 1593 LOperand* value = UseRegister(instr->value()); | 1599 LOperand* value = UseRegister(instr->value()); |
| 1594 LNumberUntagD* res = new(zone()) LNumberUntagD(value); | 1600 LNumberUntagD* res = new(zone()) LNumberUntagD(value); |
| 1595 return AssignEnvironment(DefineAsRegister(res)); | 1601 return AssignEnvironment(DefineAsRegister(res)); |
| 1596 } else { | 1602 } else { |
| 1597 ASSERT(to.IsInteger32()); | 1603 ASSERT(to.IsInteger32()); |
| 1598 LOperand* value = UseRegister(instr->value()); | 1604 LOperand* value = UseRegister(instr->value()); |
| 1599 if (instr->value()->type().IsSmi()) { | 1605 if (instr->value()->type().IsSmi()) { |
| 1600 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false)); | 1606 return DefineSameAsFirst(new(zone()) LSmiUntag(value, false)); |
| 1601 } else { | 1607 } else { |
| 1602 bool truncating = instr->CanTruncateToInt32(); | 1608 bool truncating = instr->CanTruncateToInt32(); |
| 1603 LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1); | 1609 LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1); |
| 1604 LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp); | 1610 LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp); |
| 1605 return AssignEnvironment(DefineSameAsFirst(res)); | 1611 return AssignEnvironment(DefineSameAsFirst(res)); |
| 1606 } | 1612 } |
| 1607 } | 1613 } |
| 1608 } else if (from.IsDouble()) { | 1614 } else if (from.IsDouble()) { |
| 1609 if (to.IsTagged()) { | 1615 if (to.IsTagged()) { |
| 1616 info()->MarkAsDeferredCalling(); |
| 1610 LOperand* value = UseRegister(instr->value()); | 1617 LOperand* value = UseRegister(instr->value()); |
| 1611 LOperand* temp = TempRegister(); | 1618 LOperand* temp = TempRegister(); |
| 1612 | 1619 |
| 1613 // Make sure that temp and result_temp are different registers. | 1620 // Make sure that temp and result_temp are different registers. |
| 1614 LUnallocated* result_temp = TempRegister(); | 1621 LUnallocated* result_temp = TempRegister(); |
| 1615 LNumberTagD* result = new(zone()) LNumberTagD(value, temp); | 1622 LNumberTagD* result = new(zone()) LNumberTagD(value, temp); |
| 1616 return AssignPointerMap(Define(result, result_temp)); | 1623 return AssignPointerMap(Define(result, result_temp)); |
| 1617 } else { | 1624 } else { |
| 1618 ASSERT(to.IsInteger32()); | 1625 ASSERT(to.IsInteger32()); |
| 1619 LOperand* value = UseRegister(instr->value()); | 1626 LOperand* value = UseRegister(instr->value()); |
| 1620 return AssignEnvironment(DefineAsRegister(new(zone()) LDoubleToI(value))); | 1627 return AssignEnvironment(DefineAsRegister(new(zone()) LDoubleToI(value))); |
| 1621 } | 1628 } |
| 1622 } else if (from.IsInteger32()) { | 1629 } else if (from.IsInteger32()) { |
| 1630 info()->MarkAsDeferredCalling(); |
| 1623 if (to.IsTagged()) { | 1631 if (to.IsTagged()) { |
| 1624 HValue* val = instr->value(); | 1632 HValue* val = instr->value(); |
| 1625 LOperand* value = UseRegister(val); | 1633 LOperand* value = UseRegister(val); |
| 1626 if (val->CheckFlag(HInstruction::kUint32)) { | 1634 if (val->CheckFlag(HInstruction::kUint32)) { |
| 1627 LOperand* temp = FixedTemp(xmm1); | 1635 LOperand* temp = FixedTemp(xmm1); |
| 1628 LNumberTagU* result = new(zone()) LNumberTagU(value, temp); | 1636 LNumberTagU* result = new(zone()) LNumberTagU(value, temp); |
| 1629 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result))); | 1637 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result))); |
| 1630 } else if (val->HasRange() && val->range()->IsInSmiRange()) { | 1638 } else if (val->HasRange() && val->range()->IsInSmiRange()) { |
| 1631 return DefineSameAsFirst(new(zone()) LSmiTag(value)); | 1639 return DefineSameAsFirst(new(zone()) LSmiTag(value)); |
| 1632 } else { | 1640 } else { |
| (...skipping 443 matching lines...) |
| 2076 | 2084 |
| 2077 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) { | 2085 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) { |
| 2078 ASSERT(argument_count_ == 0); | 2086 ASSERT(argument_count_ == 0); |
| 2079 allocator_->MarkAsOsrEntry(); | 2087 allocator_->MarkAsOsrEntry(); |
| 2080 current_block_->last_environment()->set_ast_id(instr->ast_id()); | 2088 current_block_->last_environment()->set_ast_id(instr->ast_id()); |
| 2081 return AssignEnvironment(new(zone()) LOsrEntry); | 2089 return AssignEnvironment(new(zone()) LOsrEntry); |
| 2082 } | 2090 } |
| 2083 | 2091 |
| 2084 | 2092 |
| 2085 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { | 2093 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { |
| 2086 int spill_index = chunk()->GetParameterStackSlot(instr->index()); | 2094 LParameter* result = new(zone()) LParameter; |
| 2087 return DefineAsSpilled(new(zone()) LParameter, spill_index); | 2095 switch (instr->kind()) { |
| 2096 case FUNCTION_PARAMETER: { |
| 2097 int spill_index = chunk()->GetParameterStackSlot(instr->index()); |
| 2098 return DefineAsSpilled(result, spill_index); |
| 2099 } |
| 2100 case KEYED_LOAD_IC_PARAMETER: { |
| 2101 if (instr->index() == 0) return DefineFixed(result, rdx); |
| 2102 if (instr->index() == 1) return DefineFixed(result, rax); |
| 2103 UNREACHABLE(); |
| 2104 break; |
| 2105 } |
| 2106 case KEYED_STORE_IC_PARAMETER: |
| 2107 if (instr->index() == 0) return DefineFixed(result, rdx); |
| 2108 if (instr->index() == 1) return DefineFixed(result, rcx); |
| 2109 if (instr->index() == 2) return DefineFixed(result, rax); |
| 2110 break; |
| 2111 default: |
| 2112 UNREACHABLE(); |
| 2113 } |
| 2114 return NULL; |
| 2088 } | 2115 } |
| 2089 | 2116 |
| 2090 | 2117 |
| 2091 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2118 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { |
| 2092 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. | 2119 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width. |
| 2093 if (spill_index > LUnallocated::kMaxFixedIndex) { | 2120 if (spill_index > LUnallocated::kMaxFixedIndex) { |
| 2094 Abort("Too many spill slots needed for OSR"); | 2121 Abort("Too many spill slots needed for OSR"); |
| 2095 spill_index = 0; | 2122 spill_index = 0; |
| 2096 } | 2123 } |
| 2097 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2124 return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); |
| (...skipping 75 matching lines...) |
| 2173 instruction_pending_deoptimization_environment_ = NULL; | 2200 instruction_pending_deoptimization_environment_ = NULL; |
| 2174 pending_deoptimization_ast_id_ = BailoutId::None(); | 2201 pending_deoptimization_ast_id_ = BailoutId::None(); |
| 2175 return result; | 2202 return result; |
| 2176 } | 2203 } |
| 2177 | 2204 |
| 2178 return NULL; | 2205 return NULL; |
| 2179 } | 2206 } |
| 2180 | 2207 |
| 2181 | 2208 |
| 2182 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { | 2209 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) { |
| 2210 info()->MarkAsDeferredCalling(); |
| 2183 if (instr->is_function_entry()) { | 2211 if (instr->is_function_entry()) { |
| 2184 return MarkAsCall(new(zone()) LStackCheck, instr); | 2212 return MarkAsCall(new(zone()) LStackCheck, instr); |
| 2185 } else { | 2213 } else { |
| 2186 ASSERT(instr->is_backwards_branch()); | 2214 ASSERT(instr->is_backwards_branch()); |
| 2187 return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck)); | 2215 return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck)); |
| 2188 } | 2216 } |
| 2189 } | 2217 } |
| 2190 | 2218 |
| 2191 | 2219 |
| 2192 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { | 2220 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { |
| (...skipping 66 matching lines...) |
| 2259 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2287 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
| 2260 LOperand* object = UseRegister(instr->object()); | 2288 LOperand* object = UseRegister(instr->object()); |
| 2261 LOperand* index = UseTempRegister(instr->index()); | 2289 LOperand* index = UseTempRegister(instr->index()); |
| 2262 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); | 2290 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); |
| 2263 } | 2291 } |
| 2264 | 2292 |
| 2265 | 2293 |
| 2266 } } // namespace v8::internal | 2294 } } // namespace v8::internal |
| 2267 | 2295 |
| 2268 #endif // V8_TARGET_ARCH_X64 | 2296 #endif // V8_TARGET_ARCH_X64 |
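For context on the pattern this CL introduces: call sites recorded through MarkAsCall now flag the compilation as non-deferred calling, while only the paths that might allocate (the tagging/untagging conversions in DoChange and the stack check) are flagged as deferred calling, so a function whose only out-of-line calls live in deferred code does not have to build its stack frame eagerly. Below is a minimal sketch of that flagging, using a simplified stand-in rather than the real v8::internal::CompilationInfo; the two Mark* method names come from this diff, everything else (including the NeedsEagerFrame query) is hypothetical.

    // Simplified stand-in for CompilationInfo; only the two flags
    // touched by this CL are modeled.
    class CompilationInfo {
     public:
      // Called from LChunkBuilder::MarkAsCall: an ordinary call anywhere
      // in the function means a stack frame must be set up eagerly.
      void MarkAsNonDeferredCalling() { is_non_deferred_calling_ = true; }
      // Called from DoChange/DoStackCheck for paths that may allocate:
      // these calls happen only in deferred code, so frame construction
      // can be delayed.
      void MarkAsDeferredCalling() { is_deferred_calling_ = true; }
      // Hypothetical query the code generator could consult when
      // emitting the prologue.
      bool NeedsEagerFrame() const { return is_non_deferred_calling_; }

     private:
      bool is_non_deferred_calling_ = false;
      bool is_deferred_calling_ = false;
    };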