OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 392 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
403 chunk_->LookupLiteralRepresentation(op).IsSmi(); | 403 chunk_->LookupLiteralRepresentation(op).IsSmi(); |
404 } | 404 } |
405 | 405 |
406 | 406 |
407 bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const { | 407 bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const { |
408 return op->IsConstantOperand() && | 408 return op->IsConstantOperand() && |
409 chunk_->LookupLiteralRepresentation(op).IsTagged(); | 409 chunk_->LookupLiteralRepresentation(op).IsTagged(); |
410 } | 410 } |
411 | 411 |
412 | 412 |
413 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 413 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { |
414 HConstant* constant = chunk_->LookupConstant(op); | 414 HConstant* constant = chunk_->LookupConstant(op); |
415 return constant->Integer32Value(); | 415 return constant->Integer32Value(); |
416 } | 416 } |
417 | 417 |
418 | 418 |
419 Smi* LCodeGen::ToSmi(LConstantOperand* op) const { | 419 Smi* LCodeGen::ToSmi(LConstantOperand* op) const { |
420 HConstant* constant = chunk_->LookupConstant(op); | 420 HConstant* constant = chunk_->LookupConstant(op); |
421 return Smi::FromInt(constant->Integer32Value()); | 421 return Smi::FromInt(constant->Integer32Value()); |
422 } | 422 } |
423 | 423 |
(...skipping 266 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
690 | 690 |
691 void LCodeGen::DeoptimizeIf(Condition cc, | 691 void LCodeGen::DeoptimizeIf(Condition cc, |
692 LEnvironment* environment) { | 692 LEnvironment* environment) { |
693 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 693 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
694 ? Deoptimizer::LAZY | 694 ? Deoptimizer::LAZY |
695 : Deoptimizer::EAGER; | 695 : Deoptimizer::EAGER; |
696 DeoptimizeIf(cc, environment, bailout_type); | 696 DeoptimizeIf(cc, environment, bailout_type); |
697 } | 697 } |
698 | 698 |
699 | 699 |
700 void LCodeGen::SoftDeoptimize(LEnvironment* environment) { | |
701 ASSERT(!info()->IsStub()); | |
702 DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT); | |
703 } | |
704 | |
705 | |
706 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 700 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
707 ZoneList<Handle<Map> > maps(1, zone()); | 701 ZoneList<Handle<Map> > maps(1, zone()); |
708 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 702 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
709 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 703 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
710 RelocInfo::Mode mode = it.rinfo()->rmode(); | 704 RelocInfo::Mode mode = it.rinfo()->rmode(); |
711 if (mode == RelocInfo::EMBEDDED_OBJECT && | 705 if (mode == RelocInfo::EMBEDDED_OBJECT && |
712 it.rinfo()->target_object()->IsMap()) { | 706 it.rinfo()->target_object()->IsMap()) { |
713 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | 707 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
714 if (map->CanTransition()) { | 708 if (map->CanTransition()) { |
715 maps.Add(map, zone()); | 709 maps.Add(map, zone()); |
(...skipping 582 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1298 __ shll(left, Immediate(4)); | 1292 __ shll(left, Immediate(4)); |
1299 break; | 1293 break; |
1300 default: | 1294 default: |
1301 __ imull(left, left, Immediate(right_value)); | 1295 __ imull(left, left, Immediate(right_value)); |
1302 break; | 1296 break; |
1303 } | 1297 } |
1304 } else { | 1298 } else { |
1305 __ imull(left, left, Immediate(right_value)); | 1299 __ imull(left, left, Immediate(right_value)); |
1306 } | 1300 } |
1307 } else if (right->IsStackSlot()) { | 1301 } else if (right->IsStackSlot()) { |
1308 __ imull(left, ToOperand(right)); | 1302 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1303 __ SmiToInteger32(left, left); |
| 1304 __ imul(left, ToOperand(right)); |
| 1305 } else { |
| 1306 __ imull(left, ToOperand(right)); |
| 1307 } |
1309 } else { | 1308 } else { |
1310 __ imull(left, ToRegister(right)); | 1309 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1310 __ SmiToInteger32(left, left); |
| 1311 __ imul(left, ToRegister(right)); |
| 1312 } else { |
| 1313 __ imull(left, ToRegister(right)); |
| 1314 } |
1311 } | 1315 } |
1312 | 1316 |
1313 if (can_overflow) { | 1317 if (can_overflow) { |
1314 DeoptimizeIf(overflow, instr->environment()); | 1318 DeoptimizeIf(overflow, instr->environment()); |
1315 } | 1319 } |
1316 | 1320 |
1317 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1321 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1318 // Bail out if the result is supposed to be negative zero. | 1322 // Bail out if the result is supposed to be negative zero. |
1319 Label done; | 1323 Label done; |
1320 __ testl(left, left); | 1324 __ testl(left, left); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1357 case Token::BIT_XOR: | 1361 case Token::BIT_XOR: |
1358 __ xorl(ToRegister(left), Immediate(right_operand)); | 1362 __ xorl(ToRegister(left), Immediate(right_operand)); |
1359 break; | 1363 break; |
1360 default: | 1364 default: |
1361 UNREACHABLE(); | 1365 UNREACHABLE(); |
1362 break; | 1366 break; |
1363 } | 1367 } |
1364 } else if (right->IsStackSlot()) { | 1368 } else if (right->IsStackSlot()) { |
1365 switch (instr->op()) { | 1369 switch (instr->op()) { |
1366 case Token::BIT_AND: | 1370 case Token::BIT_AND: |
1367 __ andl(ToRegister(left), ToOperand(right)); | 1371 __ and_(ToRegister(left), ToOperand(right)); |
1368 break; | 1372 break; |
1369 case Token::BIT_OR: | 1373 case Token::BIT_OR: |
1370 __ orl(ToRegister(left), ToOperand(right)); | 1374 __ or_(ToRegister(left), ToOperand(right)); |
1371 break; | 1375 break; |
1372 case Token::BIT_XOR: | 1376 case Token::BIT_XOR: |
1373 __ xorl(ToRegister(left), ToOperand(right)); | 1377 __ xor_(ToRegister(left), ToOperand(right)); |
1374 break; | 1378 break; |
1375 default: | 1379 default: |
1376 UNREACHABLE(); | 1380 UNREACHABLE(); |
1377 break; | 1381 break; |
1378 } | 1382 } |
1379 } else { | 1383 } else { |
1380 ASSERT(right->IsRegister()); | 1384 ASSERT(right->IsRegister()); |
1381 switch (instr->op()) { | 1385 switch (instr->op()) { |
1382 case Token::BIT_AND: | 1386 case Token::BIT_AND: |
1383 __ andl(ToRegister(left), ToRegister(right)); | 1387 __ and_(ToRegister(left), ToRegister(right)); |
1384 break; | 1388 break; |
1385 case Token::BIT_OR: | 1389 case Token::BIT_OR: |
1386 __ orl(ToRegister(left), ToRegister(right)); | 1390 __ or_(ToRegister(left), ToRegister(right)); |
1387 break; | 1391 break; |
1388 case Token::BIT_XOR: | 1392 case Token::BIT_XOR: |
1389 __ xorl(ToRegister(left), ToRegister(right)); | 1393 __ xor_(ToRegister(left), ToRegister(right)); |
1390 break; | 1394 break; |
1391 default: | 1395 default: |
1392 UNREACHABLE(); | 1396 UNREACHABLE(); |
1393 break; | 1397 break; |
1394 } | 1398 } |
1395 } | 1399 } |
1396 } | 1400 } |
1397 | 1401 |
1398 | 1402 |
1399 void LCodeGen::DoShiftI(LShiftI* instr) { | 1403 void LCodeGen::DoShiftI(LShiftI* instr) { |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1442 case Token::SHR: | 1446 case Token::SHR: |
1443 if (shift_count == 0 && instr->can_deopt()) { | 1447 if (shift_count == 0 && instr->can_deopt()) { |
1444 __ testl(ToRegister(left), ToRegister(left)); | 1448 __ testl(ToRegister(left), ToRegister(left)); |
1445 DeoptimizeIf(negative, instr->environment()); | 1449 DeoptimizeIf(negative, instr->environment()); |
1446 } else { | 1450 } else { |
1447 __ shrl(ToRegister(left), Immediate(shift_count)); | 1451 __ shrl(ToRegister(left), Immediate(shift_count)); |
1448 } | 1452 } |
1449 break; | 1453 break; |
1450 case Token::SHL: | 1454 case Token::SHL: |
1451 if (shift_count != 0) { | 1455 if (shift_count != 0) { |
1452 __ shll(ToRegister(left), Immediate(shift_count)); | 1456 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1457 __ shl(ToRegister(left), Immediate(shift_count)); |
| 1458 } else { |
| 1459 __ shll(ToRegister(left), Immediate(shift_count)); |
| 1460 } |
1453 } | 1461 } |
1454 break; | 1462 break; |
1455 default: | 1463 default: |
1456 UNREACHABLE(); | 1464 UNREACHABLE(); |
1457 break; | 1465 break; |
1458 } | 1466 } |
1459 } | 1467 } |
1460 } | 1468 } |
1461 | 1469 |
1462 | 1470 |
1463 void LCodeGen::DoSubI(LSubI* instr) { | 1471 void LCodeGen::DoSubI(LSubI* instr) { |
1464 LOperand* left = instr->left(); | 1472 LOperand* left = instr->left(); |
1465 LOperand* right = instr->right(); | 1473 LOperand* right = instr->right(); |
1466 ASSERT(left->Equals(instr->result())); | 1474 ASSERT(left->Equals(instr->result())); |
1467 | 1475 |
1468 if (right->IsConstantOperand()) { | 1476 if (right->IsConstantOperand()) { |
1469 __ subl(ToRegister(left), | 1477 __ subl(ToRegister(left), |
1470 Immediate(ToInteger32(LConstantOperand::cast(right)))); | 1478 Immediate(ToInteger32(LConstantOperand::cast(right)))); |
1471 } else if (right->IsRegister()) { | 1479 } else if (right->IsRegister()) { |
1472 __ subl(ToRegister(left), ToRegister(right)); | 1480 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1481 __ subq(ToRegister(left), ToRegister(right)); |
| 1482 } else { |
| 1483 __ subl(ToRegister(left), ToRegister(right)); |
| 1484 } |
1473 } else { | 1485 } else { |
1474 __ subl(ToRegister(left), ToOperand(right)); | 1486 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1487 __ subq(ToRegister(left), ToOperand(right)); |
| 1488 } else { |
| 1489 __ subl(ToRegister(left), ToOperand(right)); |
| 1490 } |
1475 } | 1491 } |
1476 | 1492 |
1477 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1493 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1478 DeoptimizeIf(overflow, instr->environment()); | 1494 DeoptimizeIf(overflow, instr->environment()); |
1479 } | 1495 } |
1480 } | 1496 } |
1481 | 1497 |
1482 | 1498 |
1483 void LCodeGen::DoConstantI(LConstantI* instr) { | 1499 void LCodeGen::DoConstantI(LConstantI* instr) { |
1484 __ Set(ToRegister(instr->result()), instr->value()); | 1500 __ Set(ToRegister(instr->result()), instr->value()); |
(...skipping 18 matching lines...) Expand all Loading... |
1503 Register tmp = ToRegister(instr->temp()); | 1519 Register tmp = ToRegister(instr->temp()); |
1504 __ Set(tmp, int_val); | 1520 __ Set(tmp, int_val); |
1505 __ movq(res, tmp); | 1521 __ movq(res, tmp); |
1506 } | 1522 } |
1507 } | 1523 } |
1508 | 1524 |
1509 | 1525 |
1510 void LCodeGen::DoConstantT(LConstantT* instr) { | 1526 void LCodeGen::DoConstantT(LConstantT* instr) { |
1511 Handle<Object> value = instr->value(); | 1527 Handle<Object> value = instr->value(); |
1512 AllowDeferredHandleDereference smi_check; | 1528 AllowDeferredHandleDereference smi_check; |
1513 if (value->IsSmi()) { | 1529 __ LoadObject(ToRegister(instr->result()), value); |
1514 __ Move(ToRegister(instr->result()), value); | |
1515 } else { | |
1516 __ LoadHeapObject(ToRegister(instr->result()), | |
1517 Handle<HeapObject>::cast(value)); | |
1518 } | |
1519 } | 1530 } |
1520 | 1531 |
1521 | 1532 |
1522 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { | 1533 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { |
1523 Register result = ToRegister(instr->result()); | 1534 Register result = ToRegister(instr->result()); |
1524 Register map = ToRegister(instr->value()); | 1535 Register map = ToRegister(instr->value()); |
1525 __ EnumLength(result, map); | 1536 __ EnumLength(result, map); |
1526 } | 1537 } |
1527 | 1538 |
1528 | 1539 |
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1650 LOperand* left = instr->left(); | 1661 LOperand* left = instr->left(); |
1651 LOperand* right = instr->right(); | 1662 LOperand* right = instr->right(); |
1652 | 1663 |
1653 if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) { | 1664 if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) { |
1654 if (right->IsConstantOperand()) { | 1665 if (right->IsConstantOperand()) { |
1655 int32_t offset = ToInteger32(LConstantOperand::cast(right)); | 1666 int32_t offset = ToInteger32(LConstantOperand::cast(right)); |
1656 __ leal(ToRegister(instr->result()), | 1667 __ leal(ToRegister(instr->result()), |
1657 MemOperand(ToRegister(left), offset)); | 1668 MemOperand(ToRegister(left), offset)); |
1658 } else { | 1669 } else { |
1659 Operand address(ToRegister(left), ToRegister(right), times_1, 0); | 1670 Operand address(ToRegister(left), ToRegister(right), times_1, 0); |
1660 __ leal(ToRegister(instr->result()), address); | 1671 if (instr->hydrogen()->representation().IsSmi()) { |
| 1672 __ lea(ToRegister(instr->result()), address); |
| 1673 } else { |
| 1674 __ leal(ToRegister(instr->result()), address); |
| 1675 } |
1661 } | 1676 } |
1662 } else { | 1677 } else { |
1663 if (right->IsConstantOperand()) { | 1678 if (right->IsConstantOperand()) { |
1664 __ addl(ToRegister(left), | 1679 __ addl(ToRegister(left), |
1665 Immediate(ToInteger32(LConstantOperand::cast(right)))); | 1680 Immediate(ToInteger32(LConstantOperand::cast(right)))); |
1666 } else if (right->IsRegister()) { | 1681 } else if (right->IsRegister()) { |
1667 __ addl(ToRegister(left), ToRegister(right)); | 1682 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1683 __ addq(ToRegister(left), ToRegister(right)); |
| 1684 } else { |
| 1685 __ addl(ToRegister(left), ToRegister(right)); |
| 1686 } |
1668 } else { | 1687 } else { |
1669 __ addl(ToRegister(left), ToOperand(right)); | 1688 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1689 __ addq(ToRegister(left), ToOperand(right)); |
| 1690 } else { |
| 1691 __ addl(ToRegister(left), ToOperand(right)); |
| 1692 } |
1670 } | 1693 } |
1671 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1694 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1672 DeoptimizeIf(overflow, instr->environment()); | 1695 DeoptimizeIf(overflow, instr->environment()); |
1673 } | 1696 } |
1674 } | 1697 } |
1675 } | 1698 } |
1676 | 1699 |
1677 | 1700 |
1678 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1701 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1679 LOperand* left = instr->left(); | 1702 LOperand* left = instr->left(); |
1680 LOperand* right = instr->right(); | 1703 LOperand* right = instr->right(); |
1681 ASSERT(left->Equals(instr->result())); | 1704 ASSERT(left->Equals(instr->result())); |
1682 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1705 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
1683 if (instr->hydrogen()->representation().IsInteger32()) { | 1706 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { |
1684 Label return_left; | 1707 Label return_left; |
1685 Condition condition = (operation == HMathMinMax::kMathMin) | 1708 Condition condition = (operation == HMathMinMax::kMathMin) |
1686 ? less_equal | 1709 ? less_equal |
1687 : greater_equal; | 1710 : greater_equal; |
1688 Register left_reg = ToRegister(left); | 1711 Register left_reg = ToRegister(left); |
1689 if (right->IsConstantOperand()) { | 1712 if (right->IsConstantOperand()) { |
1690 Immediate right_imm = | 1713 Immediate right_imm = |
1691 Immediate(ToInteger32(LConstantOperand::cast(right))); | 1714 Immediate(ToInteger32(LConstantOperand::cast(right))); |
| 1715 ASSERT(!instr->hydrogen_value()->representation().IsSmi()); |
1692 __ cmpl(left_reg, right_imm); | 1716 __ cmpl(left_reg, right_imm); |
1693 __ j(condition, &return_left, Label::kNear); | 1717 __ j(condition, &return_left, Label::kNear); |
1694 __ movq(left_reg, right_imm); | 1718 __ movq(left_reg, right_imm); |
1695 } else if (right->IsRegister()) { | 1719 } else if (right->IsRegister()) { |
1696 Register right_reg = ToRegister(right); | 1720 Register right_reg = ToRegister(right); |
1697 __ cmpl(left_reg, right_reg); | 1721 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1722 __ cmpq(left_reg, right_reg); |
| 1723 } else { |
| 1724 __ cmpl(left_reg, right_reg); |
| 1725 } |
1698 __ j(condition, &return_left, Label::kNear); | 1726 __ j(condition, &return_left, Label::kNear); |
1699 __ movq(left_reg, right_reg); | 1727 __ movq(left_reg, right_reg); |
1700 } else { | 1728 } else { |
1701 Operand right_op = ToOperand(right); | 1729 Operand right_op = ToOperand(right); |
1702 __ cmpl(left_reg, right_op); | 1730 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1731 __ cmpq(left_reg, right_op); |
| 1732 } else { |
| 1733 __ cmpl(left_reg, right_op); |
| 1734 } |
1703 __ j(condition, &return_left, Label::kNear); | 1735 __ j(condition, &return_left, Label::kNear); |
1704 __ movq(left_reg, right_op); | 1736 __ movq(left_reg, right_op); |
1705 } | 1737 } |
1706 __ bind(&return_left); | 1738 __ bind(&return_left); |
1707 } else { | 1739 } else { |
1708 ASSERT(instr->hydrogen()->representation().IsDouble()); | 1740 ASSERT(instr->hydrogen()->representation().IsDouble()); |
1709 Label check_nan_left, check_zero, return_left, return_right; | 1741 Label check_nan_left, check_zero, return_left, return_right; |
1710 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; | 1742 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; |
1711 XMMRegister left_reg = ToDoubleRegister(left); | 1743 XMMRegister left_reg = ToDoubleRegister(left); |
1712 XMMRegister right_reg = ToDoubleRegister(right); | 1744 XMMRegister right_reg = ToDoubleRegister(right); |
(...skipping 367 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2080 Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right())); | 2112 Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right())); |
2081 __ CmpObject(left, right); | 2113 __ CmpObject(left, right); |
2082 } else { | 2114 } else { |
2083 Register right = ToRegister(instr->right()); | 2115 Register right = ToRegister(instr->right()); |
2084 __ cmpq(left, right); | 2116 __ cmpq(left, right); |
2085 } | 2117 } |
2086 EmitBranch(instr, equal); | 2118 EmitBranch(instr, equal); |
2087 } | 2119 } |
2088 | 2120 |
2089 | 2121 |
2090 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) { | |
2091 Register left = ToRegister(instr->left()); | |
2092 | |
2093 __ cmpq(left, Immediate(instr->hydrogen()->right())); | |
2094 EmitBranch(instr, equal); | |
2095 } | |
2096 | |
2097 | |
2098 Condition LCodeGen::EmitIsObject(Register input, | 2122 Condition LCodeGen::EmitIsObject(Register input, |
2099 Label* is_not_object, | 2123 Label* is_not_object, |
2100 Label* is_object) { | 2124 Label* is_object) { |
2101 ASSERT(!input.is(kScratchRegister)); | 2125 ASSERT(!input.is(kScratchRegister)); |
2102 | 2126 |
2103 __ JumpIfSmi(input, is_not_object); | 2127 __ JumpIfSmi(input, is_not_object); |
2104 | 2128 |
2105 __ CompareRoot(input, Heap::kNullValueRootIndex); | 2129 __ CompareRoot(input, Heap::kNullValueRootIndex); |
2106 __ j(equal, is_object); | 2130 __ j(equal, is_object); |
2107 | 2131 |
(...skipping 588 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2696 int offset = index * kPointerSize; | 2720 int offset = index * kPointerSize; |
2697 if (index < 0) { | 2721 if (index < 0) { |
2698 // Negative property indices are in-object properties, indexed | 2722 // Negative property indices are in-object properties, indexed |
2699 // from the end of the fixed part of the object. | 2723 // from the end of the fixed part of the object. |
2700 __ movq(result, FieldOperand(object, offset + type->instance_size())); | 2724 __ movq(result, FieldOperand(object, offset + type->instance_size())); |
2701 } else { | 2725 } else { |
2702 // Non-negative property indices are in the properties array. | 2726 // Non-negative property indices are in the properties array. |
2703 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2727 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
2704 __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize)); | 2728 __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize)); |
2705 } | 2729 } |
2706 } else if (lookup.IsConstantFunction()) { | 2730 } else if (lookup.IsConstant()) { |
2707 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); | 2731 Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate()); |
2708 __ LoadHeapObject(result, function); | 2732 __ LoadObject(result, constant); |
2709 } else { | 2733 } else { |
2710 // Negative lookup. | 2734 // Negative lookup. |
2711 // Check prototypes. | 2735 // Check prototypes. |
2712 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); | 2736 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); |
2713 Heap* heap = type->GetHeap(); | 2737 Heap* heap = type->GetHeap(); |
2714 while (*current != heap->null_value()) { | 2738 while (*current != heap->null_value()) { |
2715 __ LoadHeapObject(result, current); | 2739 __ LoadHeapObject(result, current); |
2716 __ Cmp(FieldOperand(result, HeapObject::kMapOffset), | 2740 __ Cmp(FieldOperand(result, HeapObject::kMapOffset), |
2717 Handle<Map>(current->map())); | 2741 Handle<Map>(current->map())); |
2718 DeoptimizeIf(not_equal, env); | 2742 DeoptimizeIf(not_equal, env); |
(...skipping 10 matching lines...) Expand all Loading... |
2729 static bool CompactEmit(SmallMapList* list, | 2753 static bool CompactEmit(SmallMapList* list, |
2730 Handle<String> name, | 2754 Handle<String> name, |
2731 int i, | 2755 int i, |
2732 Isolate* isolate) { | 2756 Isolate* isolate) { |
2733 Handle<Map> map = list->at(i); | 2757 Handle<Map> map = list->at(i); |
2734 // If the map has ElementsKind transitions, we will generate map checks | 2758 // If the map has ElementsKind transitions, we will generate map checks |
2735 // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS). | 2759 // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS). |
2736 if (map->HasElementsTransition()) return false; | 2760 if (map->HasElementsTransition()) return false; |
2737 LookupResult lookup(isolate); | 2761 LookupResult lookup(isolate); |
2738 map->LookupDescriptor(NULL, *name, &lookup); | 2762 map->LookupDescriptor(NULL, *name, &lookup); |
2739 return lookup.IsField() || lookup.IsConstantFunction(); | 2763 return lookup.IsField() || lookup.IsConstant(); |
2740 } | 2764 } |
2741 | 2765 |
2742 | 2766 |
2743 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { | 2767 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { |
2744 Register object = ToRegister(instr->object()); | 2768 Register object = ToRegister(instr->object()); |
2745 Register result = ToRegister(instr->result()); | 2769 Register result = ToRegister(instr->result()); |
2746 | 2770 |
2747 int map_count = instr->hydrogen()->types()->length(); | 2771 int map_count = instr->hydrogen()->types()->length(); |
2748 bool need_generic = instr->hydrogen()->need_generic(); | 2772 bool need_generic = instr->hydrogen()->need_generic(); |
2749 | 2773 |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2879 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { | 2903 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
2880 ElementsKind elements_kind = instr->elements_kind(); | 2904 ElementsKind elements_kind = instr->elements_kind(); |
2881 LOperand* key = instr->key(); | 2905 LOperand* key = instr->key(); |
2882 if (!key->IsConstantOperand()) { | 2906 if (!key->IsConstantOperand()) { |
2883 Register key_reg = ToRegister(key); | 2907 Register key_reg = ToRegister(key); |
2884 // Even though the HLoad/StoreKeyed (in this case) instructions force | 2908 // Even though the HLoad/StoreKeyed (in this case) instructions force |
2885 // the input representation for the key to be an integer, the input | 2909 // the input representation for the key to be an integer, the input |
2886 // gets replaced during bound check elimination with the index argument | 2910 // gets replaced during bound check elimination with the index argument |
2887 // to the bounds check, which can be tagged, so that case must be | 2911 // to the bounds check, which can be tagged, so that case must be |
2888 // handled here, too. | 2912 // handled here, too. |
2889 if (instr->hydrogen()->key()->representation().IsSmi()) { | 2913 if (instr->hydrogen()->IsDehoisted()) { |
2890 __ SmiToInteger64(key_reg, key_reg); | |
2891 } else if (instr->hydrogen()->IsDehoisted()) { | |
2892 // Sign extend key because it could be a 32 bit negative value | 2914 // Sign extend key because it could be a 32 bit negative value |
2893 // and the dehoisted address computation happens in 64 bits | 2915 // and the dehoisted address computation happens in 64 bits |
2894 __ movsxlq(key_reg, key_reg); | 2916 __ movsxlq(key_reg, key_reg); |
2895 } | 2917 } |
2896 } | 2918 } |
2897 Operand operand(BuildFastArrayOperand( | 2919 Operand operand(BuildFastArrayOperand( |
2898 instr->elements(), | 2920 instr->elements(), |
2899 key, | 2921 key, |
2900 elements_kind, | 2922 elements_kind, |
2901 0, | 2923 0, |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2952 | 2974 |
2953 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { | 2975 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
2954 XMMRegister result(ToDoubleRegister(instr->result())); | 2976 XMMRegister result(ToDoubleRegister(instr->result())); |
2955 LOperand* key = instr->key(); | 2977 LOperand* key = instr->key(); |
2956 if (!key->IsConstantOperand()) { | 2978 if (!key->IsConstantOperand()) { |
2957 Register key_reg = ToRegister(key); | 2979 Register key_reg = ToRegister(key); |
2958 // Even though the HLoad/StoreKeyed instructions force the input | 2980 // Even though the HLoad/StoreKeyed instructions force the input |
2959 // representation for the key to be an integer, the input gets replaced | 2981 // representation for the key to be an integer, the input gets replaced |
2960 // during bound check elimination with the index argument to the bounds | 2982 // during bound check elimination with the index argument to the bounds |
2961 // check, which can be tagged, so that case must be handled here, too. | 2983 // check, which can be tagged, so that case must be handled here, too. |
2962 if (instr->hydrogen()->key()->representation().IsSmi()) { | 2984 if (instr->hydrogen()->IsDehoisted()) { |
2963 __ SmiToInteger64(key_reg, key_reg); | |
2964 } else if (instr->hydrogen()->IsDehoisted()) { | |
2965 // Sign extend key because it could be a 32 bit negative value | 2985 // Sign extend key because it could be a 32 bit negative value |
2966 // and the dehoisted address computation happens in 64 bits | 2986 // and the dehoisted address computation happens in 64 bits |
2967 __ movsxlq(key_reg, key_reg); | 2987 __ movsxlq(key_reg, key_reg); |
2968 } | 2988 } |
2969 } | 2989 } |
2970 | 2990 |
2971 if (instr->hydrogen()->RequiresHoleCheck()) { | 2991 if (instr->hydrogen()->RequiresHoleCheck()) { |
2972 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag + | 2992 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag + |
2973 sizeof(kHoleNanLower32); | 2993 sizeof(kHoleNanLower32); |
2974 Operand hole_check_operand = BuildFastArrayOperand( | 2994 Operand hole_check_operand = BuildFastArrayOperand( |
(...skipping 19 matching lines...) Expand all Loading... |
2994 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3014 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
2995 Register result = ToRegister(instr->result()); | 3015 Register result = ToRegister(instr->result()); |
2996 LOperand* key = instr->key(); | 3016 LOperand* key = instr->key(); |
2997 if (!key->IsConstantOperand()) { | 3017 if (!key->IsConstantOperand()) { |
2998 Register key_reg = ToRegister(key); | 3018 Register key_reg = ToRegister(key); |
2999 // Even though the HLoad/StoreKeyedFastElement instructions force | 3019 // Even though the HLoad/StoreKeyedFastElement instructions force |
3000 // the input representation for the key to be an integer, the input | 3020 // the input representation for the key to be an integer, the input |
3001 // gets replaced during bound check elimination with the index | 3021 // gets replaced during bound check elimination with the index |
3002 // argument to the bounds check, which can be tagged, so that | 3022 // argument to the bounds check, which can be tagged, so that |
3003 // case must be handled here, too. | 3023 // case must be handled here, too. |
3004 if (instr->hydrogen()->key()->representation().IsSmi()) { | 3024 if (instr->hydrogen()->IsDehoisted()) { |
3005 __ SmiToInteger64(key_reg, key_reg); | |
3006 } else if (instr->hydrogen()->IsDehoisted()) { | |
3007 // Sign extend key because it could be a 32 bit negative value | 3025 // Sign extend key because it could be a 32 bit negative value |
3008 // and the dehoisted address computation happens in 64 bits | 3026 // and the dehoisted address computation happens in 64 bits |
3009 __ movsxlq(key_reg, key_reg); | 3027 __ movsxlq(key_reg, key_reg); |
3010 } | 3028 } |
3011 } | 3029 } |
3012 | 3030 |
3013 // Load the result. | 3031 // Load the result. |
3014 __ movq(result, | 3032 __ movq(result, |
3015 BuildFastArrayOperand(instr->elements(), | 3033 BuildFastArrayOperand(instr->elements(), |
3016 key, | 3034 key, |
(...skipping 319 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3336 RDI_UNINITIALIZED); | 3354 RDI_UNINITIALIZED); |
3337 } | 3355 } |
3338 | 3356 |
3339 | 3357 |
// Deferred (out-of-line) path for Math.abs when the input is a tagged value.
// Deoptimizes unless the input is a heap number; otherwise produces the
// absolute value by allocating a fresh heap number whose bit pattern is the
// input's with the IEEE-754 sign bit cleared.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
  Register input_reg = ToRegister(instr->value());
  // Anything that is not a heap number bails out.
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr->environment());

  Label slow, allocated, done;
  // Pick two scratch registers guaranteed distinct from input_reg and from
  // each other (drawn from {rax, rcx, rdx}).
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(zero, &done);

  // Negative input: allocate a heap number to hold the result.
  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated, Label::kNear);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) __ movq(tmp, rax);
  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  // Copy the double's raw bits and clear the topmost (sign) bit by shifting
  // left then right by one.
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ shl(tmp2, Immediate(1));
  __ shr(tmp2, Immediate(1));
  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
  // Publish the result via the safepoint slot so that popping the safepoint
  // registers restores the new heap number into input_reg.
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
}
3390 | 3399 |
3391 | 3400 |
// Emits Math.abs for an untagged 32-bit integer, computed in place in the
// input register.  Negating the most negative int32 cannot be represented;
// that case is detected via the sign flag after NEG and deoptimizes.
void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
  Register input_reg = ToRegister(instr->value());
  __ testl(input_reg, input_reg);
  Label is_positive;
  // Non-negative values are already their own absolute value.
  __ j(not_sign, &is_positive, Label::kNear);
  __ negl(input_reg);  // Sets flags.
  // Still negative after negl means the input was kMinInt; bail out.
  DeoptimizeIf(negative, instr->environment());
  __ bind(&is_positive);
}
3401 | 3410 |
3402 | 3411 |
3403 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3412 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
3404 // Class for deferred case. | 3413 // Class for deferred case. |
3405 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { | 3414 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { |
3406 public: | 3415 public: |
(...skipping 609 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4016 ASSERT(ToRegister(instr->value()).is(rax)); | 4025 ASSERT(ToRegister(instr->value()).is(rax)); |
4017 | 4026 |
4018 __ Move(rcx, instr->hydrogen()->name()); | 4027 __ Move(rcx, instr->hydrogen()->name()); |
4019 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4028 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
4020 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4029 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
4021 : isolate()->builtins()->StoreIC_Initialize(); | 4030 : isolate()->builtins()->StoreIC_Initialize(); |
4022 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4031 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4023 } | 4032 } |
4024 | 4033 |
4025 | 4034 |
// Emits the failure reaction for a bounds check: normally a deoptimization
// when |cc| holds.  If the hydrogen instruction claims the check can be
// skipped and --debug-code is enabled, emit an int3 trap instead so that an
// incorrectly eliminated check is caught immediately in debug runs.
void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
  if (FLAG_debug_code && check->hydrogen()->skip_check()) {
    Label done;
    __ j(NegateCondition(cc), &done, Label::kNear);
    __ int3();  // Trap: a supposedly redundant bounds check actually failed.
    __ bind(&done);
  } else {
    DeoptimizeIf(cc, check->environment());
  }
}
| 4045 |
| 4046 |
4026 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 4047 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
4027 if (instr->hydrogen()->skip_check()) return; | 4048 if (instr->hydrogen()->skip_check()) return; |
4028 | 4049 |
4029 if (instr->length()->IsRegister()) { | 4050 if (instr->length()->IsRegister()) { |
4030 Register reg = ToRegister(instr->length()); | 4051 Register reg = ToRegister(instr->length()); |
4031 if (!instr->hydrogen()->length()->representation().IsSmi()) { | 4052 if (!instr->hydrogen()->length()->representation().IsSmi()) { |
4032 __ AssertZeroExtended(reg); | 4053 __ AssertZeroExtended(reg); |
4033 } | 4054 } |
4034 if (instr->index()->IsConstantOperand()) { | 4055 if (instr->index()->IsConstantOperand()) { |
4035 int constant_index = | 4056 int constant_index = |
(...skipping 17 matching lines...) Expand all Loading... |
4053 ToInteger32(LConstantOperand::cast(instr->index())); | 4074 ToInteger32(LConstantOperand::cast(instr->index())); |
4054 if (instr->hydrogen()->length()->representation().IsSmi()) { | 4075 if (instr->hydrogen()->length()->representation().IsSmi()) { |
4055 __ Cmp(length, Smi::FromInt(constant_index)); | 4076 __ Cmp(length, Smi::FromInt(constant_index)); |
4056 } else { | 4077 } else { |
4057 __ cmpq(length, Immediate(constant_index)); | 4078 __ cmpq(length, Immediate(constant_index)); |
4058 } | 4079 } |
4059 } else { | 4080 } else { |
4060 __ cmpq(length, ToRegister(instr->index())); | 4081 __ cmpq(length, ToRegister(instr->index())); |
4061 } | 4082 } |
4062 } | 4083 } |
4063 DeoptimizeIf(below_equal, instr->environment()); | 4084 Condition condition = |
| 4085 instr->hydrogen()->allow_equality() ? below : below_equal; |
| 4086 ApplyCheckIf(condition, instr); |
4064 } | 4087 } |
4065 | 4088 |
4066 | 4089 |
4067 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4090 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4068 ElementsKind elements_kind = instr->elements_kind(); | 4091 ElementsKind elements_kind = instr->elements_kind(); |
4069 LOperand* key = instr->key(); | 4092 LOperand* key = instr->key(); |
4070 if (!key->IsConstantOperand()) { | 4093 if (!key->IsConstantOperand()) { |
4071 Register key_reg = ToRegister(key); | 4094 Register key_reg = ToRegister(key); |
4072 // Even though the HLoad/StoreKeyedFastElement instructions force | 4095 // Even though the HLoad/StoreKeyedFastElement instructions force |
4073 // the input representation for the key to be an integer, the input | 4096 // the input representation for the key to be an integer, the input |
4074 // gets replaced during bound check elimination with the index | 4097 // gets replaced during bound check elimination with the index |
4075 // argument to the bounds check, which can be tagged, so that case | 4098 // argument to the bounds check, which can be tagged, so that case |
4076 // must be handled here, too. | 4099 // must be handled here, too. |
4077 if (instr->hydrogen()->key()->representation().IsSmi()) { | 4100 if (instr->hydrogen()->IsDehoisted()) { |
4078 __ SmiToInteger64(key_reg, key_reg); | |
4079 } else if (instr->hydrogen()->IsDehoisted()) { | |
4080 // Sign extend key because it could be a 32 bit negative value | 4101 // Sign extend key because it could be a 32 bit negative value |
4081 // and the dehoisted address computation happens in 64 bits | 4102 // and the dehoisted address computation happens in 64 bits |
4082 __ movsxlq(key_reg, key_reg); | 4103 __ movsxlq(key_reg, key_reg); |
4083 } | 4104 } |
4084 } | 4105 } |
4085 Operand operand(BuildFastArrayOperand( | 4106 Operand operand(BuildFastArrayOperand( |
4086 instr->elements(), | 4107 instr->elements(), |
4087 key, | 4108 key, |
4088 elements_kind, | 4109 elements_kind, |
4089 0, | 4110 0, |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4131 void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { | 4152 void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { |
4132 XMMRegister value = ToDoubleRegister(instr->value()); | 4153 XMMRegister value = ToDoubleRegister(instr->value()); |
4133 LOperand* key = instr->key(); | 4154 LOperand* key = instr->key(); |
4134 if (!key->IsConstantOperand()) { | 4155 if (!key->IsConstantOperand()) { |
4135 Register key_reg = ToRegister(key); | 4156 Register key_reg = ToRegister(key); |
4136 // Even though the HLoad/StoreKeyedFastElement instructions force | 4157 // Even though the HLoad/StoreKeyedFastElement instructions force |
4137 // the input representation for the key to be an integer, the | 4158 // the input representation for the key to be an integer, the |
4138 // input gets replaced during bound check elimination with the index | 4159 // input gets replaced during bound check elimination with the index |
4139 // argument to the bounds check, which can be tagged, so that case | 4160 // argument to the bounds check, which can be tagged, so that case |
4140 // must be handled here, too. | 4161 // must be handled here, too. |
4141 if (instr->hydrogen()->key()->representation().IsSmi()) { | 4162 if (instr->hydrogen()->IsDehoisted()) { |
4142 __ SmiToInteger64(key_reg, key_reg); | |
4143 } else if (instr->hydrogen()->IsDehoisted()) { | |
4144 // Sign extend key because it could be a 32 bit negative value | 4163 // Sign extend key because it could be a 32 bit negative value |
4145 // and the dehoisted address computation happens in 64 bits | 4164 // and the dehoisted address computation happens in 64 bits |
4146 __ movsxlq(key_reg, key_reg); | 4165 __ movsxlq(key_reg, key_reg); |
4147 } | 4166 } |
4148 } | 4167 } |
4149 | 4168 |
4150 if (instr->NeedsCanonicalization()) { | 4169 if (instr->NeedsCanonicalization()) { |
4151 Label have_value; | 4170 Label have_value; |
4152 | 4171 |
4153 __ ucomisd(value, value); | 4172 __ ucomisd(value, value); |
(...skipping 20 matching lines...) Expand all Loading... |
4174 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { | 4193 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { |
4175 Register elements = ToRegister(instr->elements()); | 4194 Register elements = ToRegister(instr->elements()); |
4176 LOperand* key = instr->key(); | 4195 LOperand* key = instr->key(); |
4177 if (!key->IsConstantOperand()) { | 4196 if (!key->IsConstantOperand()) { |
4178 Register key_reg = ToRegister(key); | 4197 Register key_reg = ToRegister(key); |
4179 // Even though the HLoad/StoreKeyedFastElement instructions force | 4198 // Even though the HLoad/StoreKeyedFastElement instructions force |
4180 // the input representation for the key to be an integer, the | 4199 // the input representation for the key to be an integer, the |
4181 // input gets replaced during bound check elimination with the index | 4200 // input gets replaced during bound check elimination with the index |
4182 // argument to the bounds check, which can be tagged, so that case | 4201 // argument to the bounds check, which can be tagged, so that case |
4183 // must be handled here, too. | 4202 // must be handled here, too. |
4184 if (instr->hydrogen()->key()->representation().IsSmi()) { | 4203 if (instr->hydrogen()->IsDehoisted()) { |
4185 __ SmiToInteger64(key_reg, key_reg); | |
4186 } else if (instr->hydrogen()->IsDehoisted()) { | |
4187 // Sign extend key because it could be a 32 bit negative value | 4204 // Sign extend key because it could be a 32 bit negative value |
4188 // and the dehoisted address computation happens in 64 bits | 4205 // and the dehoisted address computation happens in 64 bits |
4189 __ movsxlq(key_reg, key_reg); | 4206 __ movsxlq(key_reg, key_reg); |
4190 } | 4207 } |
4191 } | 4208 } |
4192 | 4209 |
4193 Operand operand = | 4210 Operand operand = |
4194 BuildFastArrayOperand(instr->elements(), | 4211 BuildFastArrayOperand(instr->elements(), |
4195 key, | 4212 key, |
4196 FAST_ELEMENTS, | 4213 FAST_ELEMENTS, |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4264 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); | 4281 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); |
4265 __ j(not_equal, ¬_applicable); | 4282 __ j(not_equal, ¬_applicable); |
4266 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4283 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
4267 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4284 Register new_map_reg = ToRegister(instr->new_map_temp()); |
4268 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | 4285 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); |
4269 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4286 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
4270 // Write barrier. | 4287 // Write barrier. |
4271 ASSERT_NE(instr->temp(), NULL); | 4288 ASSERT_NE(instr->temp(), NULL); |
4272 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4289 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
4273 ToRegister(instr->temp()), kDontSaveFPRegs); | 4290 ToRegister(instr->temp()), kDontSaveFPRegs); |
4274 } else if (FLAG_compiled_transitions) { | 4291 } else { |
4275 PushSafepointRegistersScope scope(this); | 4292 PushSafepointRegistersScope scope(this); |
4276 if (!object_reg.is(rax)) { | 4293 if (!object_reg.is(rax)) { |
4277 __ movq(rax, object_reg); | 4294 __ movq(rax, object_reg); |
4278 } | 4295 } |
4279 __ Move(rbx, to_map); | 4296 __ Move(rbx, to_map); |
4280 TransitionElementsKindStub stub(from_kind, to_kind); | 4297 TransitionElementsKindStub stub(from_kind, to_kind); |
4281 __ CallStub(&stub); | 4298 __ CallStub(&stub); |
4282 RecordSafepointWithRegisters( | 4299 RecordSafepointWithRegisters( |
4283 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4300 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4284 } else if (IsFastSmiElementsKind(from_kind) && | |
4285 IsFastDoubleElementsKind(to_kind)) { | |
4286 Register fixed_object_reg = ToRegister(instr->temp()); | |
4287 ASSERT(fixed_object_reg.is(rdx)); | |
4288 Register new_map_reg = ToRegister(instr->new_map_temp()); | |
4289 ASSERT(new_map_reg.is(rbx)); | |
4290 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | |
4291 __ movq(fixed_object_reg, object_reg); | |
4292 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(), | |
4293 RelocInfo::CODE_TARGET, instr); | |
4294 } else if (IsFastDoubleElementsKind(from_kind) && | |
4295 IsFastObjectElementsKind(to_kind)) { | |
4296 Register fixed_object_reg = ToRegister(instr->temp()); | |
4297 ASSERT(fixed_object_reg.is(rdx)); | |
4298 Register new_map_reg = ToRegister(instr->new_map_temp()); | |
4299 ASSERT(new_map_reg.is(rbx)); | |
4300 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | |
4301 __ movq(fixed_object_reg, object_reg); | |
4302 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(), | |
4303 RelocInfo::CODE_TARGET, instr); | |
4304 } else { | |
4305 UNREACHABLE(); | |
4306 } | 4301 } |
4307 __ bind(¬_applicable); | 4302 __ bind(¬_applicable); |
4308 } | 4303 } |
4309 | 4304 |
4310 | 4305 |
4311 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4306 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4312 Register object = ToRegister(instr->object()); | 4307 Register object = ToRegister(instr->object()); |
4313 Register temp = ToRegister(instr->temp()); | 4308 Register temp = ToRegister(instr->temp()); |
4314 __ TestJSArrayForAllocationMemento(object, temp); | 4309 __ TestJSArrayForAllocationMemento(object, temp); |
4315 DeoptimizeIf(equal, instr->environment()); | 4310 DeoptimizeIf(equal, instr->environment()); |
(...skipping 631 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4947 Handle<Map> map, | 4942 Handle<Map> map, |
4948 LInstruction* instr) { | 4943 LInstruction* instr) { |
4949 Label success; | 4944 Label success; |
4950 __ CompareMap(reg, map, &success); | 4945 __ CompareMap(reg, map, &success); |
4951 DeoptimizeIf(not_equal, instr->environment()); | 4946 DeoptimizeIf(not_equal, instr->environment()); |
4952 __ bind(&success); | 4947 __ bind(&success); |
4953 } | 4948 } |
4954 | 4949 |
4955 | 4950 |
4956 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 4951 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 4952 if (instr->hydrogen()->CanOmitMapChecks()) return; |
4957 LOperand* input = instr->value(); | 4953 LOperand* input = instr->value(); |
4958 ASSERT(input->IsRegister()); | 4954 ASSERT(input->IsRegister()); |
4959 Register reg = ToRegister(input); | 4955 Register reg = ToRegister(input); |
4960 | 4956 |
4961 Label success; | 4957 Label success; |
4962 SmallMapList* map_set = instr->hydrogen()->map_set(); | 4958 SmallMapList* map_set = instr->hydrogen()->map_set(); |
4963 for (int i = 0; i < map_set->length() - 1; i++) { | 4959 for (int i = 0; i < map_set->length() - 1; i++) { |
4964 Handle<Map> map = map_set->at(i); | 4960 Handle<Map> map = map_set->at(i); |
4965 __ CompareMap(reg, map, &success); | 4961 __ CompareMap(reg, map, &success); |
4966 __ j(equal, &success); | 4962 __ j(equal, &success); |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5014 // smi | 5010 // smi |
5015 __ bind(&is_smi); | 5011 __ bind(&is_smi); |
5016 __ SmiToInteger32(input_reg, input_reg); | 5012 __ SmiToInteger32(input_reg, input_reg); |
5017 __ ClampUint8(input_reg); | 5013 __ ClampUint8(input_reg); |
5018 | 5014 |
5019 __ bind(&done); | 5015 __ bind(&done); |
5020 } | 5016 } |
5021 | 5017 |
5022 | 5018 |
// Verifies that every object in the recorded prototype chain still has the
// map observed at compile time; a mismatch deoptimizes (inside
// DoCheckMapCommon).  Emits nothing when the hydrogen instruction proved the
// checks unnecessary.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
  Register reg = ToRegister(instr->temp());

  ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
  ZoneList<Handle<Map> >* maps = instr->maps();

  // The lists are parallel: maps->at(i) is the expected map of
  // prototypes->at(i).
  ASSERT(prototypes->length() == maps->length());

  for (int i = 0; i < prototypes->length(); i++) {
    __ LoadHeapObject(reg, prototypes->at(i));
    DoCheckMapCommon(reg, maps->at(i), instr);
  }
}
5038 | 5033 |
5039 | 5034 |
5040 void LCodeGen::DoAllocate(LAllocate* instr) { | 5035 void LCodeGen::DoAllocate(LAllocate* instr) { |
5041 class DeferredAllocate: public LDeferredCode { | 5036 class DeferredAllocate: public LDeferredCode { |
5042 public: | 5037 public: |
5043 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) | 5038 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) |
5044 : LDeferredCode(codegen), instr_(instr) { } | 5039 : LDeferredCode(codegen), instr_(instr) { } |
5045 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } | 5040 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } |
(...skipping 311 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5357 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5352 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
5358 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5353 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5359 ASSERT(instr->HasEnvironment()); | 5354 ASSERT(instr->HasEnvironment()); |
5360 LEnvironment* env = instr->environment(); | 5355 LEnvironment* env = instr->environment(); |
5361 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5356 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5362 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5357 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5363 } | 5358 } |
5364 | 5359 |
5365 | 5360 |
// Unconditional deoptimization, using the bailout type requested by the
// hydrogen instruction (with a stub-specific adjustment below).
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  Deoptimizer::BailoutType type = instr->hydrogen()->type();
  // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
  // needed return address), even though the implementation of LAZY and EAGER is
  // now identical. When LAZY is eventually completely folded into EAGER, remove
  // the special case below.
  if (info()->IsStub() && type == Deoptimizer::EAGER) {
    type = Deoptimizer::LAZY;
  }
  // no_condition makes DeoptimizeIf bail out unconditionally.
  DeoptimizeIf(no_condition, instr->environment(), type);
}
5373 | 5372 |
5374 | 5373 |
// LDummyUse emits no code; presumably it exists only to keep its operand
// alive for the register allocator — confirm against the LDummyUse
// declaration.
void LCodeGen::DoDummyUse(LDummyUse* instr) {
  // Nothing to see here, move on!
}
5378 | 5377 |
5379 | 5378 |
5380 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5379 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
5381 PushSafepointRegistersScope scope(this); | 5380 PushSafepointRegistersScope scope(this); |
(...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5538 FixedArray::kHeaderSize - kPointerSize)); | 5537 FixedArray::kHeaderSize - kPointerSize)); |
5539 __ bind(&done); | 5538 __ bind(&done); |
5540 } | 5539 } |
5541 | 5540 |
5542 | 5541 |
5543 #undef __ | 5542 #undef __ |
5544 | 5543 |
5545 } } // namespace v8::internal | 5544 } } // namespace v8::internal |
5546 | 5545 |
5547 #endif // V8_TARGET_ARCH_X64 | 5546 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |