| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 927 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 938 } | 938 } |
| 939 | 939 |
| 940 | 940 |
| 941 void LCodeGen::Deoptimize(LEnvironment* environment) { | 941 void LCodeGen::Deoptimize(LEnvironment* environment) { |
| 942 Deoptimizer::BailoutType bailout_type = info()->IsStub() ? Deoptimizer::LAZY | 942 Deoptimizer::BailoutType bailout_type = info()->IsStub() ? Deoptimizer::LAZY |
| 943 : Deoptimizer::EAGER; | 943 : Deoptimizer::EAGER; |
| 944 Deoptimize(environment, bailout_type); | 944 Deoptimize(environment, bailout_type); |
| 945 } | 945 } |
| 946 | 946 |
| 947 | 947 |
| 948 void LCodeGen::SoftDeoptimize(LEnvironment* environment) { | |
| 949 ASSERT(!info()->IsStub()); | |
| 950 Deoptimize(environment, Deoptimizer::SOFT); | |
| 951 } | |
| 952 | |
| 953 | |
| 954 void LCodeGen::DeoptimizeIf(Condition cond, LEnvironment* environment) { | 948 void LCodeGen::DeoptimizeIf(Condition cond, LEnvironment* environment) { |
| 955 Label dont_deopt; | 949 Label dont_deopt; |
| 956 __ B(InvertCondition(cond), &dont_deopt); | 950 __ B(InvertCondition(cond), &dont_deopt); |
| 957 Deoptimize(environment); | 951 Deoptimize(environment); |
| 958 __ Bind(&dont_deopt); | 952 __ Bind(&dont_deopt); |
| 959 } | 953 } |
| 960 | 954 |
| 961 | 955 |
| 962 void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) { | 956 void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) { |
| 963 Label dont_deopt; | 957 Label dont_deopt; |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1063 return DoubleRegister::FromAllocationIndex(op->index()); | 1057 return DoubleRegister::FromAllocationIndex(op->index()); |
| 1064 } | 1058 } |
| 1065 | 1059 |
| 1066 | 1060 |
| 1067 Operand LCodeGen::ToOperand(LOperand* op) { | 1061 Operand LCodeGen::ToOperand(LOperand* op) { |
| 1068 ASSERT(op != NULL); | 1062 ASSERT(op != NULL); |
| 1069 if (op->IsConstantOperand()) { | 1063 if (op->IsConstantOperand()) { |
| 1070 LConstantOperand* const_op = LConstantOperand::cast(op); | 1064 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 1071 HConstant* constant = chunk()->LookupConstant(const_op); | 1065 HConstant* constant = chunk()->LookupConstant(const_op); |
| 1072 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 1066 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 1073 if (r.IsInteger32()) { | 1067 if (r.IsSmi()) { |
| 1068 ASSERT(constant->HasSmiValue()); |
| 1069 return Operand(Smi::FromInt(constant->Integer32Value())); |
| 1070 } else if (r.IsInteger32()) { |
| 1074 ASSERT(constant->HasInteger32Value()); | 1071 ASSERT(constant->HasInteger32Value()); |
| 1075 return Operand(constant->Integer32Value()); | 1072 return Operand(constant->Integer32Value()); |
| 1076 } else if (r.IsDouble()) { | 1073 } else if (r.IsDouble()) { |
| 1077 Abort("ToOperand unsupported double immediate."); | 1074 Abort("ToOperand unsupported double immediate."); |
| 1078 } | 1075 } |
| 1079 ASSERT(r.IsTagged()); | 1076 ASSERT(r.IsTagged()); |
| 1080 return Operand(constant->handle()); | 1077 return Operand(constant->handle()); |
| 1081 } else if (op->IsRegister()) { | 1078 } else if (op->IsRegister()) { |
| 1082 return Operand(ToRegister(op)); | 1079 return Operand(ToRegister(op)); |
| 1083 } else if (op->IsDoubleRegister()) { | 1080 } else if (op->IsDoubleRegister()) { |
| (...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1293 Operand right = ToOperand32(instr->right()); | 1290 Operand right = ToOperand32(instr->right()); |
| 1294 if (can_overflow) { | 1291 if (can_overflow) { |
| 1295 __ Adds(result, left, right); | 1292 __ Adds(result, left, right); |
| 1296 DeoptimizeIf(vs, instr->environment()); | 1293 DeoptimizeIf(vs, instr->environment()); |
| 1297 } else { | 1294 } else { |
| 1298 __ Add(result, left, right); | 1295 __ Add(result, left, right); |
| 1299 } | 1296 } |
| 1300 } | 1297 } |
| 1301 | 1298 |
| 1302 | 1299 |
| 1300 void LCodeGen::DoAddS(LAddS* instr) { |
| 1301 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 1302 Register result = ToRegister(instr->result()); |
| 1303 Register left = ToRegister(instr->left()); |
| 1304 Operand right = ToOperand(instr->right()); |
| 1305 if (can_overflow) { |
| 1306 __ Adds(result, left, right); |
| 1307 DeoptimizeIf(vs, instr->environment()); |
| 1308 } else { |
| 1309 __ Add(result, left, right); |
| 1310 } |
| 1311 } |
| 1312 |
| 1313 |
| 1303 void LCodeGen::DoAllocate(LAllocate* instr) { | 1314 void LCodeGen::DoAllocate(LAllocate* instr) { |
| 1304 class DeferredAllocate: public LDeferredCode { | 1315 class DeferredAllocate: public LDeferredCode { |
| 1305 public: | 1316 public: |
| 1306 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) | 1317 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) |
| 1307 : LDeferredCode(codegen), instr_(instr) { } | 1318 : LDeferredCode(codegen), instr_(instr) { } |
| 1308 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } | 1319 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } |
| 1309 virtual LInstruction* instr() { return instr_; } | 1320 virtual LInstruction* instr() { return instr_; } |
| 1310 private: | 1321 private: |
| 1311 LAllocate* instr_; | 1322 LAllocate* instr_; |
| 1312 }; | 1323 }; |
| (...skipping 219 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1532 ASSERT(ToRegister(instr->left()).is(x1)); | 1543 ASSERT(ToRegister(instr->left()).is(x1)); |
| 1533 ASSERT(ToRegister(instr->right()).is(x0)); | 1544 ASSERT(ToRegister(instr->right()).is(x0)); |
| 1534 ASSERT(ToRegister(instr->result()).is(x0)); | 1545 ASSERT(ToRegister(instr->result()).is(x0)); |
| 1535 | 1546 |
| 1536 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1547 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1537 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1548 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1538 } | 1549 } |
| 1539 | 1550 |
| 1540 | 1551 |
| 1541 void LCodeGen::DoBitI(LBitI* instr) { | 1552 void LCodeGen::DoBitI(LBitI* instr) { |
| 1542 LOperand* left_op = instr->left(); | 1553 Register result = ToRegister32(instr->result()); |
| 1543 LOperand* right_op = instr->right(); | 1554 Register left = ToRegister32(instr->left()); |
| 1544 Register left = ToRegister(left_op); | 1555 Operand right = ToOperand32(instr->right()); |
| 1545 Register result = ToRegister(instr->result()); | |
| 1546 | |
| 1547 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand()); | |
| 1548 Operand right = ToOperand(right_op); | |
| 1549 | 1556 |
| 1550 switch (instr->op()) { | 1557 switch (instr->op()) { |
| 1551 case Token::BIT_AND: __ And(result, left, right); break; | 1558 case Token::BIT_AND: __ And(result, left, right); break; |
| 1559 case Token::BIT_OR: __ Orr(result, left, right); break; |
| 1560 case Token::BIT_XOR: __ Eor(result, left, right); break; |
| 1561 default: |
| 1562 UNREACHABLE(); |
| 1563 break; |
| 1564 } |
| 1565 } |
| 1566 |
| 1567 |
| 1568 void LCodeGen::DoBitS(LBitS* instr) { |
| 1569 Register result = ToRegister(instr->result()); |
| 1570 Register left = ToRegister(instr->left()); |
| 1571 Operand right = ToOperand(instr->right()); |
| 1572 |
| 1573 switch (instr->op()) { |
| 1574 case Token::BIT_AND: __ And(result, left, right); break; |
| 1552 case Token::BIT_OR: __ Orr(result, left, right); break; | 1575 case Token::BIT_OR: __ Orr(result, left, right); break; |
| 1553 case Token::BIT_XOR: __ Eor(result, left, right); break; | 1576 case Token::BIT_XOR: __ Eor(result, left, right); break; |
| 1554 default: | 1577 default: |
| 1555 UNREACHABLE(); | 1578 UNREACHABLE(); |
| 1556 break; | 1579 break; |
| 1557 } | 1580 } |
| 1558 } | 1581 } |
| 1559 | 1582 |
| 1560 | 1583 |
| 1561 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 1584 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
| 1562 Register input = ToRegister(instr->value()).W(); | 1585 Register input = ToRegister(instr->value()).W(); |
| 1563 Register result = ToRegister(instr->result()).W(); | 1586 Register result = ToRegister(instr->result()).W(); |
| 1564 __ Mvn(result, input); | 1587 __ Mvn(result, input); |
| 1565 } | 1588 } |
| 1566 | 1589 |
| 1567 | 1590 |
| 1591 void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { |
| 1592 if (FLAG_debug_code && check->hydrogen()->skip_check()) { |
| 1593 __ Assert(InvertCondition(cc), "eliminated bounds check failed"); |
| 1594 } else { |
| 1595 DeoptimizeIf(cc, check->environment()); |
| 1596 } |
| 1597 } |
| 1598 |
| 1599 |
| 1568 void LCodeGen::DoBoundsCheck(LBoundsCheck *instr) { | 1600 void LCodeGen::DoBoundsCheck(LBoundsCheck *instr) { |
| 1569 if (instr->hydrogen()->skip_check()) return; | 1601 if (instr->hydrogen()->skip_check()) return; |
| 1570 | 1602 |
| 1571 Register length = ToRegister(instr->length()); | 1603 Register length = ToRegister(instr->length()); |
| 1572 | 1604 |
| 1573 if (instr->index()->IsConstantOperand()) { | 1605 if (instr->index()->IsConstantOperand()) { |
| 1574 int constant_index = | 1606 int constant_index = |
| 1575 ToInteger32(LConstantOperand::cast(instr->index())); | 1607 ToInteger32(LConstantOperand::cast(instr->index())); |
| 1576 | 1608 |
| 1577 if (instr->hydrogen()->length()->representation().IsSmi()) { | 1609 if (instr->hydrogen()->length()->representation().IsSmi()) { |
| 1578 __ Cmp(length, Operand(Smi::FromInt(constant_index))); | 1610 __ Cmp(length, Operand(Smi::FromInt(constant_index))); |
| 1579 } else { | 1611 } else { |
| 1580 __ Cmp(length, Operand(constant_index)); | 1612 __ Cmp(length, Operand(constant_index)); |
| 1581 } | 1613 } |
| 1582 } else { | 1614 } else { |
| 1583 __ Cmp(length, ToRegister(instr->index())); | 1615 __ Cmp(length, ToRegister(instr->index())); |
| 1584 } | 1616 } |
| 1585 DeoptimizeIf(ls, instr->environment()); | 1617 Condition condition = instr->hydrogen()->allow_equality() ? lo : ls; |
| 1618 ApplyCheckIf(condition, instr); |
| 1586 } | 1619 } |
| 1587 | 1620 |
| 1588 | 1621 |
| 1589 void LCodeGen::DoBranch(LBranch* instr) { | 1622 void LCodeGen::DoBranch(LBranch* instr) { |
| 1590 Representation r = instr->hydrogen()->value()->representation(); | 1623 Representation r = instr->hydrogen()->value()->representation(); |
| 1591 Label* true_label = instr->TrueLabel(chunk_); | 1624 Label* true_label = instr->TrueLabel(chunk_); |
| 1592 Label* false_label = instr->FalseLabel(chunk_); | 1625 Label* false_label = instr->FalseLabel(chunk_); |
| 1593 | 1626 |
| 1594 if (r.IsInteger32()) { | 1627 if (r.IsInteger32()) { |
| 1595 ASSERT(!info()->IsStub()); | 1628 ASSERT(!info()->IsStub()); |
| (...skipping 298 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1894 } | 1927 } |
| 1895 | 1928 |
| 1896 | 1929 |
| 1897 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 1930 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| 1898 // Record the address of the first unknown OSR value as the place to enter. | 1931 // Record the address of the first unknown OSR value as the place to enter. |
| 1899 if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); | 1932 if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); |
| 1900 } | 1933 } |
| 1901 | 1934 |
| 1902 | 1935 |
| 1903 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 1936 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 1937 if (instr->hydrogen()->CanOmitMapChecks()) { |
| 1938 ASSERT(instr->temp() == NULL); |
| 1939 return; |
| 1940 } |
| 1941 |
| 1904 Register object = ToRegister(instr->value()); | 1942 Register object = ToRegister(instr->value()); |
| 1905 Register map_reg = ToRegister(instr->temp()); | 1943 Register map_reg = ToRegister(instr->temp()); |
| 1906 | 1944 |
| 1907 Label success; | 1945 Label success; |
| 1908 SmallMapList* map_set = instr->hydrogen()->map_set(); | 1946 SmallMapList* map_set = instr->hydrogen()->map_set(); |
| 1909 __ Ldr(map_reg, FieldMemOperand(object, HeapObject::kMapOffset)); | 1947 __ Ldr(map_reg, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 1910 for (int i = 0; i < map_set->length(); i++) { | 1948 for (int i = 0; i < map_set->length(); i++) { |
| 1911 Handle<Map> map = map_set->at(i); | 1949 Handle<Map> map = map_set->at(i); |
| 1912 __ CompareMap(map_reg, map, &success); | 1950 __ CompareMap(map_reg, map, &success); |
| 1913 __ B(eq, &success); | 1951 __ B(eq, &success); |
| (...skipping 10 matching lines...) Expand all Loading... |
| 1924 if (!instr->hydrogen()->value()->IsHeapObject()) { | 1962 if (!instr->hydrogen()->value()->IsHeapObject()) { |
| 1925 // TODO(all): Depending of how we chose to implement the deopt, if we could | 1963 // TODO(all): Depending of how we chose to implement the deopt, if we could |
| 1926 // guarantee that we have a deopt handler reachable by a tbz instruction, | 1964 // guarantee that we have a deopt handler reachable by a tbz instruction, |
| 1927 // we could use tbz here and produce less code to support this instruction. | 1965 // we could use tbz here and produce less code to support this instruction. |
| 1928 DeoptimizeIfSmi(ToRegister(instr->value()), instr->environment()); | 1966 DeoptimizeIfSmi(ToRegister(instr->value()), instr->environment()); |
| 1929 } | 1967 } |
| 1930 } | 1968 } |
| 1931 | 1969 |
| 1932 | 1970 |
| 1933 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 1971 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 1972 if (instr->hydrogen()->CanOmitPrototypeChecks()) { |
| 1973 ASSERT(instr->temp1() == NULL); |
| 1974 ASSERT(instr->temp2() == NULL); |
| 1975 return; |
| 1976 } |
| 1977 |
| 1934 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); | 1978 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); |
| 1935 ZoneList<Handle<Map> >* maps = instr->maps(); | 1979 ZoneList<Handle<Map> >* maps = instr->maps(); |
| 1936 ASSERT(prototypes->length() == maps->length()); | 1980 ASSERT(prototypes->length() == maps->length()); |
| 1937 | 1981 |
| 1938 if (!instr->hydrogen()->CanOmitPrototypeChecks()) { | 1982 Label success, deopt; |
| 1939 // TODO(jbramley): The temp registers are only needed in this case. | 1983 Register temp1 = ToRegister(instr->temp1()); |
| 1940 Label success, deopt; | 1984 Register temp2 = ToRegister(instr->temp2()); |
| 1941 Register temp1 = ToRegister(instr->temp1()); | 1985 for (int i = 0; i < prototypes->length(); i++) { |
| 1942 Register temp2 = ToRegister(instr->temp2()); | 1986 __ LoadHeapObject(temp1, prototypes->at(i)); |
| 1943 for (int i = 0; i < prototypes->length(); i++) { | 1987 __ Ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); |
| 1944 __ LoadHeapObject(temp1, prototypes->at(i)); | 1988 __ CompareMap(temp2, maps->at(i), &success); |
| 1945 __ Ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); | 1989 __ B(eq, &success); |
| 1946 __ CompareMap(temp2, maps->at(i), &success); | |
| 1947 __ B(eq, &success); | |
| 1948 } | |
| 1949 // If we didn't match a map, deoptimize. | |
| 1950 Deoptimize(instr->environment()); | |
| 1951 __ Bind(&success); | |
| 1952 } | 1990 } |
| 1991 // If we didn't match a map, deoptimize. |
| 1992 Deoptimize(instr->environment()); |
| 1993 __ Bind(&success); |
| 1953 } | 1994 } |
| 1954 | 1995 |
| 1955 | 1996 |
| 1956 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 1997 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 1957 Register value = ToRegister(instr->value()); | 1998 Register value = ToRegister(instr->value()); |
| 1958 ASSERT(ToRegister(instr->result()).Is(value)); | 1999 ASSERT(ToRegister(instr->result()).Is(value)); |
| 1959 // TODO(all): See DoCheckNonSmi for comments on use of tbz. | 2000 // TODO(all): See DoCheckNonSmi for comments on use of tbz. |
| 1960 DeoptimizeIfNotSmi(value, instr->environment()); | 2001 DeoptimizeIfNotSmi(value, instr->environment()); |
| 1961 } | 2002 } |
| 1962 | 2003 |
| (...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2241 | 2282 |
| 2242 | 2283 |
| 2243 void LCodeGen::DoConstantS(LConstantS* instr) { | 2284 void LCodeGen::DoConstantS(LConstantS* instr) { |
| 2244 __ Mov(ToRegister(instr->result()), Operand(instr->value())); | 2285 __ Mov(ToRegister(instr->result()), Operand(instr->value())); |
| 2245 } | 2286 } |
| 2246 | 2287 |
| 2247 | 2288 |
| 2248 void LCodeGen::DoConstantT(LConstantT* instr) { | 2289 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 2249 Handle<Object> value = instr->value(); | 2290 Handle<Object> value = instr->value(); |
| 2250 AllowDeferredHandleDereference smi_check; | 2291 AllowDeferredHandleDereference smi_check; |
| 2251 if (value->IsSmi()) { | 2292 __ LoadObject(ToRegister(instr->result()), value); |
| 2252 __ Mov(ToRegister(instr->result()), Operand(value)); | |
| 2253 } else { | |
| 2254 __ LoadHeapObject(ToRegister(instr->result()), | |
| 2255 Handle<HeapObject>::cast(value)); | |
| 2256 } | |
| 2257 } | 2293 } |
| 2258 | 2294 |
| 2259 | 2295 |
| 2260 void LCodeGen::DoContext(LContext* instr) { | 2296 void LCodeGen::DoContext(LContext* instr) { |
| 2261 // If there is a non-return use, the context must be moved to a register. | 2297 // If there is a non-return use, the context must be moved to a register. |
| 2262 Register result = ToRegister(instr->result()); | 2298 Register result = ToRegister(instr->result()); |
| 2263 // TODO(jbramley): LContext is only generated if it meets this condition, so | 2299 // TODO(jbramley): LContext is only generated if it meets this condition, so |
| 2264 // why not move cp unconditionally? | 2300 // why not move cp unconditionally? |
| 2265 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) { | 2301 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) { |
| 2266 if (!it.value()->IsReturn()) { | 2302 if (!it.value()->IsReturn()) { |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2335 __ Bind(&runtime); | 2371 __ Bind(&runtime); |
| 2336 __ Mov(x1, Operand(index)); | 2372 __ Mov(x1, Operand(index)); |
| 2337 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 2373 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 2338 } | 2374 } |
| 2339 | 2375 |
| 2340 __ Bind(&done); | 2376 __ Bind(&done); |
| 2341 } | 2377 } |
| 2342 | 2378 |
| 2343 | 2379 |
| 2344 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 2380 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 2345 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 2381 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
| 2346 SoftDeoptimize(instr->environment()); | 2382 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
| 2347 } else { | 2383 // needed return address), even though the implementation of LAZY and EAGER is |
| 2348 Deoptimize(instr->environment()); | 2384 // now identical. When LAZY is eventually completely folded into EAGER, remove |
| 2385 // the special case below. |
| 2386 if (info()->IsStub() && (type == Deoptimizer::EAGER)) { |
| 2387 type = Deoptimizer::LAZY; |
| 2349 } | 2388 } |
| 2389 Deoptimize(instr->environment(), type); |
| 2350 } | 2390 } |
| 2351 | 2391 |
| 2352 | 2392 |
| 2353 void LCodeGen::DoDivI(LDivI* instr) { | 2393 void LCodeGen::DoDivI(LDivI* instr) { |
| 2354 Register dividend = ToRegister32(instr->left()); | 2394 Register dividend = ToRegister32(instr->left()); |
| 2355 Register result = ToRegister32(instr->result()); | 2395 Register result = ToRegister32(instr->result()); |
| 2356 | 2396 |
| 2357 bool has_power_of_2_divisor = instr->hydrogen()->HasPowerOf2Divisor(); | 2397 bool has_power_of_2_divisor = instr->hydrogen()->HasPowerOf2Divisor(); |
| 2358 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 2398 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 2359 bool bailout_on_minus_zero = | 2399 bool bailout_on_minus_zero = |
| (...skipping 1037 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3397 int offset = index * kPointerSize; | 3437 int offset = index * kPointerSize; |
| 3398 if (index < 0) { | 3438 if (index < 0) { |
| 3399 // Negative property indices are in-object properties, indexed from the | 3439 // Negative property indices are in-object properties, indexed from the |
| 3400 // end of the fixed part of the object. | 3440 // end of the fixed part of the object. |
| 3401 __ Ldr(result, FieldMemOperand(object, offset + type->instance_size())); | 3441 __ Ldr(result, FieldMemOperand(object, offset + type->instance_size())); |
| 3402 } else { | 3442 } else { |
| 3403 // Non-negative property indices are in the properties array. | 3443 // Non-negative property indices are in the properties array. |
| 3404 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 3444 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 3405 __ Ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); | 3445 __ Ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); |
| 3406 } | 3446 } |
| 3407 } else if (lookup.IsConstantFunction()) { | 3447 } else if (lookup.IsConstant()) { |
| 3408 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); | 3448 Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate()); |
| 3409 __ LoadHeapObject(result, function); | 3449 __ LoadObject(result, constant); |
| 3410 } else { | 3450 } else { |
| 3411 // Negative lookup. Check prototypes. | 3451 // Negative lookup. Check prototypes. |
| 3412 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); | 3452 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); |
| 3413 Heap* heap = type->GetHeap(); | 3453 Heap* heap = type->GetHeap(); |
| 3414 while (*current != heap->null_value()) { | 3454 while (*current != heap->null_value()) { |
| 3415 __ LoadHeapObject(result, current); | 3455 __ LoadHeapObject(result, current); |
| 3416 __ CompareMap(result, result, Handle<Map>(current->map())); | 3456 __ CompareMap(result, result, Handle<Map>(current->map())); |
| 3417 DeoptimizeIf(ne, env); | 3457 DeoptimizeIf(ne, env); |
| 3418 current = | 3458 current = |
| 3419 Handle<HeapObject>(HeapObject::cast(current->map()->prototype())); | 3459 Handle<HeapObject>(HeapObject::cast(current->map()->prototype())); |
| (...skipping 545 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3965 | 4005 |
| 3966 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 4006 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
| 3967 HMathMinMax::Operation op = instr->hydrogen()->operation(); | 4007 HMathMinMax::Operation op = instr->hydrogen()->operation(); |
| 3968 if (instr->hydrogen()->representation().IsInteger32()) { | 4008 if (instr->hydrogen()->representation().IsInteger32()) { |
| 3969 Register result = ToRegister32(instr->result()); | 4009 Register result = ToRegister32(instr->result()); |
| 3970 Register left = ToRegister32(instr->left()); | 4010 Register left = ToRegister32(instr->left()); |
| 3971 Operand right = ToOperand32(instr->right()); | 4011 Operand right = ToOperand32(instr->right()); |
| 3972 | 4012 |
| 3973 __ Cmp(left, right); | 4013 __ Cmp(left, right); |
| 3974 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ? ge : le); | 4014 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ? ge : le); |
| 4015 } else if (instr->hydrogen()->representation().IsSmi()) { |
| 4016 Register result = ToRegister(instr->result()); |
| 4017 Register left = ToRegister(instr->left()); |
| 4018 Operand right = ToOperand(instr->right()); |
| 4019 |
| 4020 __ Cmp(left, right); |
| 4021 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ? ge : le); |
| 3975 } else { | 4022 } else { |
| 3976 ASSERT(instr->hydrogen()->representation().IsDouble()); | 4023 ASSERT(instr->hydrogen()->representation().IsDouble()); |
| 3977 DoubleRegister result = ToDoubleRegister(instr->result()); | 4024 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 3978 DoubleRegister left = ToDoubleRegister(instr->left()); | 4025 DoubleRegister left = ToDoubleRegister(instr->left()); |
| 3979 DoubleRegister right = ToDoubleRegister(instr->right()); | 4026 DoubleRegister right = ToDoubleRegister(instr->right()); |
| 3980 | 4027 |
| 3981 if (op == HMathMinMax::kMathMax) { | 4028 if (op == HMathMinMax::kMathMax) { |
| 3982 __ Fmax(result, left, right); | 4029 __ Fmax(result, left, right); |
| 3983 } else { | 4030 } else { |
| 3984 ASSERT(op == HMathMinMax::kMathMin); | 4031 ASSERT(op == HMathMinMax::kMathMin); |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4040 } else { | 4087 } else { |
| 4041 DeoptimizeIfNegative(dividend, instr->environment()); | 4088 DeoptimizeIfNegative(dividend, instr->environment()); |
| 4042 } | 4089 } |
| 4043 } | 4090 } |
| 4044 } | 4091 } |
| 4045 __ Bind(&done); | 4092 __ Bind(&done); |
| 4046 } | 4093 } |
| 4047 | 4094 |
| 4048 | 4095 |
| 4049 void LCodeGen::DoMulConstI(LMulConstI* instr) { | 4096 void LCodeGen::DoMulConstI(LMulConstI* instr) { |
| 4097 // TODO(jbramley): Support smi operations (or make a separate DoMulConstS). |
| 4098 |
| 4050 Register result = ToRegister32(instr->result()); | 4099 Register result = ToRegister32(instr->result()); |
| 4051 Register left = ToRegister32(instr->left()); | 4100 Register left = ToRegister32(instr->left()); |
| 4052 int32_t right = ToInteger32(instr->right()); | 4101 int32_t right = ToInteger32(instr->right()); |
| 4053 | 4102 |
| 4054 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 4103 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 4055 bool bailout_on_minus_zero = | 4104 bool bailout_on_minus_zero = |
| 4056 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 4105 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
| 4057 | 4106 |
| 4058 if (bailout_on_minus_zero) { | 4107 if (bailout_on_minus_zero) { |
| 4059 if (right < 0) { | 4108 if (right < 0) { |
| (...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4132 } | 4181 } |
| 4133 } | 4182 } |
| 4134 break; | 4183 break; |
| 4135 } | 4184 } |
| 4136 } | 4185 } |
| 4137 | 4186 |
| 4138 | 4187 |
| 4139 void LCodeGen::DoMulI(LMulI* instr) { | 4188 void LCodeGen::DoMulI(LMulI* instr) { |
| 4140 Register result = ToRegister32(instr->result()); | 4189 Register result = ToRegister32(instr->result()); |
| 4141 Register left = ToRegister32(instr->left()); | 4190 Register left = ToRegister32(instr->left()); |
| 4191 Register right = ToRegister32(instr->right()); |
| 4142 | 4192 |
| 4143 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 4193 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 4144 bool bailout_on_minus_zero = | 4194 bool bailout_on_minus_zero = |
| 4145 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 4195 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
| 4146 | 4196 |
| 4147 Register right = ToRegister32(instr->right()); | |
| 4148 if (bailout_on_minus_zero) { | 4197 if (bailout_on_minus_zero) { |
| 4149 // If one operand is zero and the other is negative, the result is -0. | 4198 // If one operand is zero and the other is negative, the result is -0. |
| 4150 // - Set Z (eq) if either left or right, or both, are 0. | 4199 // - Set Z (eq) if either left or right, or both, are 0. |
| 4151 __ Cmp(left, 0); | 4200 __ Cmp(left, 0); |
| 4152 __ Ccmp(right, 0, ZFlag, ne); | 4201 __ Ccmp(right, 0, ZFlag, ne); |
| 4153 // - If so (eq), set N (mi) if left + right is negative. | 4202 // - If so (eq), set N (mi) if left + right is negative. |
| 4154 // - Otherwise, clear N. | 4203 // - Otherwise, clear N. |
| 4155 __ Ccmn(left, right, NoFlag, eq); | 4204 __ Ccmn(left, right, NoFlag, eq); |
| 4156 DeoptimizeIf(mi, instr->environment()); | 4205 DeoptimizeIf(mi, instr->environment()); |
| 4157 } | 4206 } |
| 4158 | 4207 |
| 4159 if (can_overflow) { | 4208 if (can_overflow) { |
| 4160 __ Smull(result.X(), left, right); | 4209 __ Smull(result.X(), left, right); |
| 4161 __ Cmp(result.X(), Operand(result, SXTW)); | 4210 __ Cmp(result.X(), Operand(result, SXTW)); |
| 4162 DeoptimizeIf(ne, instr->environment()); | 4211 DeoptimizeIf(ne, instr->environment()); |
| 4163 } else { | 4212 } else { |
| 4164 __ Mul(result, left, right); | 4213 __ Mul(result, left, right); |
| 4165 } | 4214 } |
| 4166 } | 4215 } |
| 4167 | 4216 |
| 4168 | 4217 |
| 4218 void LCodeGen::DoMulS(LMulS* instr) { |
| 4219 Register result = ToRegister(instr->result()); |
| 4220 Register left = ToRegister(instr->left()); |
| 4221 Register right = ToRegister(instr->right()); |
| 4222 |
| 4223 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 4224 bool bailout_on_minus_zero = |
| 4225 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
| 4226 |
| 4227 if (bailout_on_minus_zero) { |
| 4228 // If one operand is zero and the other is negative, the result is -0. |
| 4229 // - Set Z (eq) if either left or right, or both, are 0. |
| 4230 __ Cmp(left, 0); |
| 4231 __ Ccmp(right, 0, ZFlag, ne); |
| 4232 // - If so (eq), set N (mi) if left + right is negative. |
| 4233 // - Otherwise, clear N. |
| 4234 __ Ccmn(left, right, NoFlag, eq); |
| 4235 DeoptimizeIf(mi, instr->environment()); |
| 4236 } |
| 4237 |
| 4238 STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0)); |
| 4239 if (can_overflow) { |
| 4240 __ Smulh(result, left, right); |
| 4241 __ Cmp(result, Operand(result.W(), SXTW)); |
| 4242 __ SmiTag(result); |
| 4243 DeoptimizeIf(ne, instr->environment()); |
| 4244 } else { |
| 4245 // TODO(jbramley): This could be rewritten to support UseRegisterAtStart. |
| 4246 ASSERT(!AreAliased(result, right)); |
| 4247 __ SmiUntag(result, left); |
| 4248 __ Mul(result, result, right); |
| 4249 } |
| 4250 } |
| 4251 |
| 4252 |
| 4169 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4253 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 4170 // TODO(3095996): Get rid of this. For now, we need to make the | 4254 // TODO(3095996): Get rid of this. For now, we need to make the |
| 4171 // result register contain a valid pointer because it is already | 4255 // result register contain a valid pointer because it is already |
| 4172 // contained in the register pointer map. | 4256 // contained in the register pointer map. |
| 4173 Register result = ToRegister(instr->result()); | 4257 Register result = ToRegister(instr->result()); |
| 4174 __ Mov(result, 0); | 4258 __ Mov(result, 0); |
| 4175 | 4259 |
| 4176 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4260 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 4177 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4261 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
| 4178 __ StoreToSafepointRegisterSlot(x0, result); | 4262 __ StoreToSafepointRegisterSlot(x0, result); |
| (...skipping 1083 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5262 CallRuntime(Runtime::kThrow, 1, instr); | 5346 CallRuntime(Runtime::kThrow, 1, instr); |
| 5263 | 5347 |
| 5264 if (FLAG_debug_code) { | 5348 if (FLAG_debug_code) { |
| 5265 __ Abort("Unreachable code in Throw."); | 5349 __ Abort("Unreachable code in Throw."); |
| 5266 } | 5350 } |
| 5267 } | 5351 } |
| 5268 | 5352 |
| 5269 | 5353 |
// Emits code that migrates |object| from its current elements kind
// (instr->from_kind(), described by instr->original_map()) to
// instr->to_kind() / instr->transitioned_map(). If the object's map is
// not the expected source map, the whole operation is skipped.
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
  Register object = ToRegister(instr->object());
  Register temp1 = ToRegister(instr->temp1());

  Handle<Map> from_map = instr->original_map();
  Handle<Map> to_map = instr->transitioned_map();
  ElementsKind from_kind = instr->from_kind();
  ElementsKind to_kind = instr->to_kind();

  Label not_applicable;
  // Branch to not_applicable unless object's map equals from_map
  // (replaces the earlier CompareMap + B(ne) pair). No smi check is
  // performed here — presumably the instruction's inputs guarantee a
  // heap object; TODO(review): confirm against the hydrogen instruction.
  __ CheckMap(object, temp1, from_map, &not_applicable, DONT_DO_SMI_CHECK);

  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // Simple transitions only need the map pointer swapped in place:
    // store the new map into the object's map slot...
    Register new_map = ToRegister(instr->temp2());
    __ Mov(new_map, Operand(to_map));
    __ Str(new_map, FieldMemOperand(object, HeapObject::kMapOffset));
    // Write barrier.
    // ...and record the write so the GC sees the new map reference.
    __ RecordWriteField(object, HeapObject::kMapOffset, new_map, temp1,
                        GetLinkRegisterState(), kDontSaveFPRegs);
  } else {
    // Non-trivial transitions (e.g. ones that must rewrite the backing
    // store) go through TransitionElementsKindStub. The stub call can
    // trigger GC, so all registers are spilled around it and a
    // safepoint is recorded for the call site.
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
    // Stub calling convention used here: x0 = object, x1 = target map.
    __ Mov(x0, object);
    __ Mov(x1, Operand(to_map));
    TransitionElementsKindStub stub(from_kind, to_kind);
    __ CallStub(&stub);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  }
  __ Bind(&not_applicable);
}
| 5324 | 5384 |
| 5325 | 5385 |
| 5326 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 5386 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 5327 Register object = ToRegister(instr->object()); | 5387 Register object = ToRegister(instr->object()); |
| 5328 Register temp1 = ToRegister(instr->temp1()); | 5388 Register temp1 = ToRegister(instr->temp1()); |
| 5329 Register temp2 = ToRegister(instr->temp2()); | 5389 Register temp2 = ToRegister(instr->temp2()); |
| 5330 __ TestJSArrayForAllocationMemento(object, temp1, temp2); | 5390 __ TestJSArrayForAllocationMemento(object, temp1, temp2); |
| (...skipping 215 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5546 __ Bind(&out_of_object); | 5606 __ Bind(&out_of_object); |
| 5547 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5607 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 5548 // Index is equal to negated out of object property index plus 1. | 5608 // Index is equal to negated out of object property index plus 1. |
| 5549 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5609 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 5550 __ Ldr(result, FieldMemOperand(result, | 5610 __ Ldr(result, FieldMemOperand(result, |
| 5551 FixedArray::kHeaderSize - kPointerSize)); | 5611 FixedArray::kHeaderSize - kPointerSize)); |
| 5552 __ Bind(&done); | 5612 __ Bind(&done); |
| 5553 } | 5613 } |
| 5554 | 5614 |
| 5555 } } // namespace v8::internal | 5615 } } // namespace v8::internal |
| OLD | NEW |