| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 24 matching lines...) |
| 35 namespace internal { | 35 namespace internal { | 
| 36 | 36 | 
| 37 #define DEFINE_COMPILE(type)                            \ | 37 #define DEFINE_COMPILE(type)                            \ | 
| 38   void L##type::CompileToNative(LCodeGen* generator) {  \ | 38   void L##type::CompileToNative(LCodeGen* generator) {  \ | 
| 39     generator->Do##type(this);                          \ | 39     generator->Do##type(this);                          \ | 
| 40   } | 40   } | 
| 41 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 41 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE) | 
| 42 #undef DEFINE_COMPILE | 42 #undef DEFINE_COMPILE | 
| 43 | 43 | 
| 44 LOsrEntry::LOsrEntry() { | 44 LOsrEntry::LOsrEntry() { | 
| 45   for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) { | 45   for (int i = 0; i < Register::NumAllocatableRegisters(); ++i) { | 
| 46     register_spills_[i] = NULL; | 46     register_spills_[i] = NULL; | 
| 47   } | 47   } | 
| 48   for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) { | 48   for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { | 
| 49     double_register_spills_[i] = NULL; | 49     double_register_spills_[i] = NULL; | 
| 50   } | 50   } | 
| 51 } | 51 } | 
| 52 | 52 | 
| 53 | 53 | 
| 54 void LOsrEntry::MarkSpilledRegister(int allocation_index, | 54 void LOsrEntry::MarkSpilledRegister(int allocation_index, | 
| 55                                     LOperand* spill_operand) { | 55                                     LOperand* spill_operand) { | 
| 56   ASSERT(spill_operand->IsStackSlot()); | 56   ASSERT(spill_operand->IsStackSlot()); | 
| 57   ASSERT(register_spills_[allocation_index] == NULL); | 57   ASSERT(register_spills_[allocation_index] == NULL); | 
| 58   register_spills_[allocation_index] = spill_operand; | 58   register_spills_[allocation_index] = spill_operand; | 
| (...skipping 550 matching lines...) |
| 609   int argument_index_accumulator = 0; | 609   int argument_index_accumulator = 0; | 
| 610   instr->set_environment(CreateEnvironment(hydrogen_env, | 610   instr->set_environment(CreateEnvironment(hydrogen_env, | 
| 611                                            &argument_index_accumulator)); | 611                                            &argument_index_accumulator)); | 
| 612   return instr; | 612   return instr; | 
| 613 } | 613 } | 
| 614 | 614 | 
| 615 | 615 | 
| 616 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, | 616 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, | 
| 617                                         HInstruction* hinstr, | 617                                         HInstruction* hinstr, | 
| 618                                         CanDeoptimize can_deoptimize) { | 618                                         CanDeoptimize can_deoptimize) { | 
|  | 619   info()->MarkAsNonDeferredCalling(); | 
| 619 #ifdef DEBUG | 620 #ifdef DEBUG | 
| 620   instr->VerifyCall(); | 621   instr->VerifyCall(); | 
| 621 #endif | 622 #endif | 
| 622   instr->MarkAsCall(); | 623   instr->MarkAsCall(); | 
| 623   instr = AssignPointerMap(instr); | 624   instr = AssignPointerMap(instr); | 
| 624 | 625 | 
| 625   if (hinstr->HasObservableSideEffects()) { | 626   if (hinstr->HasObservableSideEffects()) { | 
| 626     ASSERT(hinstr->next()->IsSimulate()); | 627     ASSERT(hinstr->next()->IsSimulate()); | 
| 627     HSimulate* sim = HSimulate::cast(hinstr->next()); | 628     HSimulate* sim = HSimulate::cast(hinstr->next()); | 
| 628     ASSERT(instruction_pending_deoptimization_environment_ == NULL); | 629     ASSERT(instruction_pending_deoptimization_environment_ == NULL); | 
| (...skipping 947 matching lines...) |
| 1576   UNREACHABLE(); | 1577   UNREACHABLE(); | 
| 1577   return NULL; | 1578   return NULL; | 
| 1578 } | 1579 } | 
| 1579 | 1580 | 
| 1580 | 1581 | 
| 1581 LInstruction* LChunkBuilder::DoChange(HChange* instr) { | 1582 LInstruction* LChunkBuilder::DoChange(HChange* instr) { | 
| 1582   Representation from = instr->from(); | 1583   Representation from = instr->from(); | 
| 1583   Representation to = instr->to(); | 1584   Representation to = instr->to(); | 
| 1584   if (from.IsTagged()) { | 1585   if (from.IsTagged()) { | 
| 1585     if (to.IsDouble()) { | 1586     if (to.IsDouble()) { | 
|  | 1587       info()->MarkAsDeferredCalling(); | 
| 1586       LOperand* value = UseRegister(instr->value()); | 1588       LOperand* value = UseRegister(instr->value()); | 
| 1587       LNumberUntagD* res = new(zone()) LNumberUntagD(value); | 1589       LNumberUntagD* res = new(zone()) LNumberUntagD(value); | 
| 1588       return AssignEnvironment(DefineAsRegister(res)); | 1590       return AssignEnvironment(DefineAsRegister(res)); | 
| 1589     } else { | 1591     } else { | 
| 1590       ASSERT(to.IsInteger32()); | 1592       ASSERT(to.IsInteger32()); | 
| 1591       LOperand* value = UseRegisterAtStart(instr->value()); | 1593       LOperand* value = UseRegisterAtStart(instr->value()); | 
| 1592       LInstruction* res = NULL; | 1594       LInstruction* res = NULL; | 
| 1593       if (instr->value()->type().IsSmi()) { | 1595       if (instr->value()->type().IsSmi()) { | 
| 1594         res = DefineAsRegister(new(zone()) LSmiUntag(value, false)); | 1596         res = DefineAsRegister(new(zone()) LSmiUntag(value, false)); | 
| 1595       } else { | 1597       } else { | 
| 1596         LOperand* temp1 = TempRegister(); | 1598         LOperand* temp1 = TempRegister(); | 
| 1597         LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() | 1599         LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() | 
| 1598                                                       : NULL; | 1600                                                       : NULL; | 
| 1599         LOperand* temp3 = FixedTemp(f22); | 1601         LOperand* temp3 = FixedTemp(f22); | 
| 1600         res = DefineSameAsFirst(new(zone()) LTaggedToI(value, | 1602         res = DefineSameAsFirst(new(zone()) LTaggedToI(value, | 
| 1601                                                        temp1, | 1603                                                        temp1, | 
| 1602                                                        temp2, | 1604                                                        temp2, | 
| 1603                                                        temp3)); | 1605                                                        temp3)); | 
| 1604         res = AssignEnvironment(res); | 1606         res = AssignEnvironment(res); | 
| 1605       } | 1607       } | 
| 1606       return res; | 1608       return res; | 
| 1607     } | 1609     } | 
| 1608   } else if (from.IsDouble()) { | 1610   } else if (from.IsDouble()) { | 
| 1609     if (to.IsTagged()) { | 1611     if (to.IsTagged()) { | 
|  | 1612       info()->MarkAsDeferredCalling(); | 
| 1610       LOperand* value = UseRegister(instr->value()); | 1613       LOperand* value = UseRegister(instr->value()); | 
| 1611       LOperand* temp1 = TempRegister(); | 1614       LOperand* temp1 = TempRegister(); | 
| 1612       LOperand* temp2 = TempRegister(); | 1615       LOperand* temp2 = TempRegister(); | 
| 1613 | 1616 | 
| 1614       // Make sure that the temp and result_temp registers are | 1617       // Make sure that the temp and result_temp registers are | 
| 1615       // different. | 1618       // different. | 
| 1616       LUnallocated* result_temp = TempRegister(); | 1619       LUnallocated* result_temp = TempRegister(); | 
| 1617       LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2); | 1620       LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2); | 
| 1618       Define(result, result_temp); | 1621       Define(result, result_temp); | 
| 1619       return AssignPointerMap(result); | 1622       return AssignPointerMap(result); | 
| 1620     } else { | 1623     } else { | 
| 1621       ASSERT(to.IsInteger32()); | 1624       ASSERT(to.IsInteger32()); | 
| 1622       LOperand* value = UseRegister(instr->value()); | 1625       LOperand* value = UseRegister(instr->value()); | 
| 1623       LOperand* temp1 = TempRegister(); | 1626       LOperand* temp1 = TempRegister(); | 
| 1624       LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL; | 1627       LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL; | 
| 1625       LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2); | 1628       LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2); | 
| 1626       return AssignEnvironment(DefineAsRegister(res)); | 1629       return AssignEnvironment(DefineAsRegister(res)); | 
| 1627     } | 1630     } | 
| 1628   } else if (from.IsInteger32()) { | 1631   } else if (from.IsInteger32()) { | 
|  | 1632     info()->MarkAsDeferredCalling(); | 
| 1629     if (to.IsTagged()) { | 1633     if (to.IsTagged()) { | 
| 1630       HValue* val = instr->value(); | 1634       HValue* val = instr->value(); | 
| 1631       LOperand* value = UseRegisterAtStart(val); | 1635       LOperand* value = UseRegisterAtStart(val); | 
| 1632       if (val->CheckFlag(HInstruction::kUint32)) { | 1636       if (val->CheckFlag(HInstruction::kUint32)) { | 
| 1633         LNumberTagU* result = new(zone()) LNumberTagU(value); | 1637         LNumberTagU* result = new(zone()) LNumberTagU(value); | 
| 1634         return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result))); | 1638         return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result))); | 
| 1635       } else if (val->HasRange() && val->range()->IsInSmiRange()) { | 1639       } else if (val->HasRange() && val->range()->IsInSmiRange()) { | 
| 1636         return DefineAsRegister(new(zone()) LSmiTag(value)); | 1640         return DefineAsRegister(new(zone()) LSmiTag(value)); | 
| 1637       } else { | 1641       } else { | 
| 1638         LNumberTagI* result = new(zone()) LNumberTagI(value); | 1642         LNumberTagI* result = new(zone()) LNumberTagI(value); | 
| (...skipping 218 matching lines...) |
| 1857     } | 1861     } | 
| 1858     result = new(zone()) LLoadKeyed(obj, key); | 1862     result = new(zone()) LLoadKeyed(obj, key); | 
| 1859   } else { | 1863   } else { | 
| 1860     ASSERT( | 1864     ASSERT( | 
| 1861         (instr->representation().IsInteger32() && | 1865         (instr->representation().IsInteger32() && | 
| 1862          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) && | 1866          (elements_kind != EXTERNAL_FLOAT_ELEMENTS) && | 
| 1863          (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) || | 1867          (elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) || | 
| 1864         (instr->representation().IsDouble() && | 1868         (instr->representation().IsDouble() && | 
| 1865          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) || | 1869          ((elements_kind == EXTERNAL_FLOAT_ELEMENTS) || | 
| 1866           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS)))); | 1870           (elements_kind == EXTERNAL_DOUBLE_ELEMENTS)))); | 
| 1867     LOperand* external_pointer = UseRegister(instr->elements()); | 1871     // A float->double conversion without FPU support requires an extra scratch | 
|  | 1872     // register. For convenience, just mark the elements register as "UseTemp" | 
|  | 1873     // so that it can be used as a temp during the float->double conversion | 
|  | 1874     // once it's no longer needed for the float load. | 
|  | 1875     bool needs_temp = | 
|  | 1876         !CpuFeatures::IsSupported(FPU) && | 
|  | 1877         (elements_kind == EXTERNAL_FLOAT_ELEMENTS); | 
|  | 1878     LOperand* external_pointer = needs_temp | 
|  | 1879         ? UseTempRegister(instr->elements()) | 
|  | 1880         : UseRegister(instr->elements()); | 
| 1868     result = new(zone()) LLoadKeyed(external_pointer, key); | 1881     result = new(zone()) LLoadKeyed(external_pointer, key); | 
| 1869   } | 1882   } | 
| 1870 | 1883 | 
| 1871   DefineAsRegister(result); | 1884   DefineAsRegister(result); | 
| 1872   // An unsigned int array load might overflow and cause a deopt, make sure it | 1885   // An unsigned int array load might overflow and cause a deopt, make sure it | 
| 1873   // has an environment. | 1886   // has an environment. | 
| 1874   bool can_deoptimize = instr->RequiresHoleCheck() || | 1887   bool can_deoptimize = instr->RequiresHoleCheck() || | 
| 1875       (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS); | 1888       (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS); | 
| 1876   return can_deoptimize ? AssignEnvironment(result) : result; | 1889   return can_deoptimize ? AssignEnvironment(result) : result; | 
| 1877 } | 1890 } | 
| (...skipping 198 matching lines...) |
| 2076 | 2089 | 
| 2077 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) { | 2090 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) { | 
| 2078   ASSERT(argument_count_ == 0); | 2091   ASSERT(argument_count_ == 0); | 
| 2079   allocator_->MarkAsOsrEntry(); | 2092   allocator_->MarkAsOsrEntry(); | 
| 2080   current_block_->last_environment()->set_ast_id(instr->ast_id()); | 2093   current_block_->last_environment()->set_ast_id(instr->ast_id()); | 
| 2081   return AssignEnvironment(new(zone()) LOsrEntry); | 2094   return AssignEnvironment(new(zone()) LOsrEntry); | 
| 2082 } | 2095 } | 
| 2083 | 2096 | 
| 2084 | 2097 | 
| 2085 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { | 2098 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) { | 
| 2086   int spill_index = chunk()->GetParameterStackSlot(instr->index()); | 2099   LParameter* result = new(zone()) LParameter; | 
| 2087   return DefineAsSpilled(new(zone()) LParameter, spill_index); | 2100   if (info()->IsOptimizing()) { | 
|  | 2101     int spill_index = chunk()->GetParameterStackSlot(instr->index()); | 
|  | 2102     return DefineAsSpilled(result, spill_index); | 
|  | 2103   } else { | 
|  | 2104     ASSERT(info()->IsStub()); | 
|  | 2105     CodeStubInterfaceDescriptor* descriptor = | 
|  | 2106         info()->code_stub()->GetInterfaceDescriptor(info()->isolate()); | 
|  | 2107     Register reg = descriptor->register_params_[instr->index()]; | 
|  | 2108     return DefineFixed(result, reg); | 
|  | 2109   } | 
| 2088 } | 2110 } | 
| 2089 | 2111 | 
| 2090 | 2112 | 
| 2091 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 2113 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) { | 
| 2092   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width. | 2114   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width. | 
| 2093   if (spill_index > LUnallocated::kMaxFixedIndex) { | 2115   if (spill_index > LUnallocated::kMaxFixedIndex) { | 
| 2094     Abort("Too many spill slots needed for OSR"); | 2116     Abort("Too many spill slots needed for OSR"); | 
| 2095     spill_index = 0; | 2117     spill_index = 0; | 
| 2096   } | 2118   } | 
| 2097   return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 2119   return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index); | 
| (...skipping 158 matching lines...) |
| 2256 | 2278 | 
| 2257 | 2279 | 
| 2258 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2280 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 
| 2259   LOperand* object = UseRegister(instr->object()); | 2281   LOperand* object = UseRegister(instr->object()); | 
| 2260   LOperand* index = UseRegister(instr->index()); | 2282   LOperand* index = UseRegister(instr->index()); | 
| 2261   return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); | 2283   return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index)); | 
| 2262 } | 2284 } | 
| 2263 | 2285 | 
| 2264 | 2286 | 
| 2265 } }  // namespace v8::internal | 2287 } }  // namespace v8::internal | 
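
For context, a minimal sketch (not part of the patch) of what the DEFINE_COMPILE macro near the top of this diff expands to for one concrete instruction; LGoto is used purely as an illustrative entry from LITHIUM_CONCRETE_INSTRUCTION_LIST:

```cpp
// Hypothetical expansion of DEFINE_COMPILE(Goto): the L## and Do## token
// pasting generates one CompileToNative override per concrete Lithium
// instruction, each of which simply dispatches to the matching LCodeGen visitor.
void LGoto::CompileToNative(LCodeGen* generator) {
  generator->DoGoto(this);
}
```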