| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 399 matching lines...) |
| 410 return DwVfpRegister::FromAllocationIndex(index); | 410 return DwVfpRegister::FromAllocationIndex(index); |
| 411 } | 411 } |
| 412 | 412 |
| 413 | 413 |
| 414 Register LCodeGen::ToRegister(LOperand* op) const { | 414 Register LCodeGen::ToRegister(LOperand* op) const { |
| 415 ASSERT(op->IsRegister()); | 415 ASSERT(op->IsRegister()); |
| 416 return ToRegister(op->index()); | 416 return ToRegister(op->index()); |
| 417 } | 417 } |
| 418 | 418 |
| 419 | 419 |
| 420 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { | |
| 421 if (op->IsRegister()) { | |
| 422 return ToRegister(op->index()); | |
| 423 } else if (op->IsConstantOperand()) { | |
| 424 LConstantOperand* const_op = LConstantOperand::cast(op); | |
| 425 HConstant* constant = chunk_->LookupConstant(const_op); | |
| 426 Handle<Object> literal = constant->handle(); | |
| 427 Representation r = chunk_->LookupLiteralRepresentation(const_op); | |
| 428 if (r.IsInteger32()) { | |
| 429 ASSERT(literal->IsNumber()); | |
| 430 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); | |
| 431 } else if (r.IsDouble()) { | |
| 432 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate); | |
| 433 } else { | |
| 434 ASSERT(r.IsTagged()); | |
| 435 __ LoadObject(scratch, literal); | |
| 436 } | |
| 437 return scratch; | |
| 438 } else if (op->IsStackSlot() || op->IsArgument()) { | |
| 439 __ ldr(scratch, ToMemOperand(op)); | |
| 440 return scratch; | |
| 441 } | |
| 442 UNREACHABLE(); | |
| 443 return scratch; | |
| 444 } | |
| 445 | |
| 446 | |
| 447 DwVfpRegister LCodeGen::ToDoubleRegister(LOperand* op) const { | 420 DwVfpRegister LCodeGen::ToDoubleRegister(LOperand* op) const { |
| 448 ASSERT(op->IsDoubleRegister()); | 421 ASSERT(op->IsDoubleRegister()); |
| 449 return ToDoubleRegister(op->index()); | 422 return ToDoubleRegister(op->index()); |
| 450 } | 423 } |
| 451 | 424 |
| 452 | 425 |
| 453 DwVfpRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op, | |
| 454 SwVfpRegister flt_scratch, | |
| 455 DwVfpRegister dbl_scratch) { | |
| 456 if (op->IsDoubleRegister()) { | |
| 457 return ToDoubleRegister(op->index()); | |
| 458 } else if (op->IsConstantOperand()) { | |
| 459 LConstantOperand* const_op = LConstantOperand::cast(op); | |
| 460 HConstant* constant = chunk_->LookupConstant(const_op); | |
| 461 Handle<Object> literal = constant->handle(); | |
| 462 Representation r = chunk_->LookupLiteralRepresentation(const_op); | |
| 463 if (r.IsInteger32()) { | |
| 464 ASSERT(literal->IsNumber()); | |
| 465 __ mov(ip, Operand(static_cast<int32_t>(literal->Number()))); | |
| 466 __ vmov(flt_scratch, ip); | |
| 467 __ vcvt_f64_s32(dbl_scratch, flt_scratch); | |
| 468 return dbl_scratch; | |
| 469 } else if (r.IsDouble()) { | |
| 470 Abort(kUnsupportedDoubleImmediate); | |
| 471 } else if (r.IsTagged()) { | |
| 472 Abort(kUnsupportedTaggedImmediate); | |
| 473 } | |
| 474 } else if (op->IsStackSlot() || op->IsArgument()) { | |
| 475 // TODO(regis): Why is vldr not taking a MemOperand? | |
| 476 // __ vldr(dbl_scratch, ToMemOperand(op)); | |
| 477 MemOperand mem_op = ToMemOperand(op); | |
| 478 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset()); | |
| 479 return dbl_scratch; | |
| 480 } | |
| 481 UNREACHABLE(); | |
| 482 return dbl_scratch; | |
| 483 } | |
| 484 | |
| 485 | |
| 486 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 426 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
| 487 HConstant* constant = chunk_->LookupConstant(op); | 427 HConstant* constant = chunk_->LookupConstant(op); |
| 488 ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged()); | 428 ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged()); |
| 489 return constant->handle(); | 429 return constant->handle(); |
| 490 } | 430 } |
| 491 | 431 |
| 492 | 432 |
| 493 bool LCodeGen::IsInteger32(LConstantOperand* op) const { | 433 bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
| 494 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); | 434 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); |
| 495 } | 435 } |
| (...skipping 865 matching lines...) |
| 1361 // it gets implemented. | 1301 // it gets implemented. |
| 1362 __ mul(scratch, result, ip); | 1302 __ mul(scratch, result, ip); |
| 1363 __ sub(remainder, dividend, scratch); | 1303 __ sub(remainder, dividend, scratch); |
| 1364 } | 1304 } |
| 1365 } | 1305 } |
| 1366 } | 1306 } |
| 1367 | 1307 |
| 1368 | 1308 |
| 1369 void LCodeGen::DoDivI(LDivI* instr) { | 1309 void LCodeGen::DoDivI(LDivI* instr) { |
| 1370 if (instr->hydrogen()->HasPowerOf2Divisor()) { | 1310 if (instr->hydrogen()->HasPowerOf2Divisor()) { |
| 1371 Register dividend = ToRegister(instr->left()); | 1311 const Register dividend = ToRegister(instr->left()); |
| 1312 const Register result = ToRegister(instr->result()); |
| 1372 int32_t divisor = instr->hydrogen()->right()->GetInteger32Constant(); | 1313 int32_t divisor = instr->hydrogen()->right()->GetInteger32Constant(); |
| 1373 int32_t test_value = 0; | 1314 int32_t test_value = 0; |
| 1374 int32_t power = 0; | 1315 int32_t power = 0; |
| 1375 | 1316 |
| 1376 if (divisor > 0) { | 1317 if (divisor > 0) { |
| 1377 test_value = divisor - 1; | 1318 test_value = divisor - 1; |
| 1378 power = WhichPowerOf2(divisor); | 1319 power = WhichPowerOf2(divisor); |
| 1379 } else { | 1320 } else { |
| 1380 // Check for (0 / -x) that will produce negative zero. | 1321 // Check for (0 / -x) that will produce negative zero. |
| 1381 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1322 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1382 __ tst(dividend, Operand(dividend)); | 1323 __ cmp(dividend, Operand::Zero()); |
| 1383 DeoptimizeIf(eq, instr->environment()); | 1324 DeoptimizeIf(eq, instr->environment()); |
| 1384 } | 1325 } |
| 1385 // Check for (kMinInt / -1). | 1326 // Check for (kMinInt / -1). |
| 1386 if (divisor == -1 && instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1327 if (divisor == -1 && instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1387 __ cmp(dividend, Operand(kMinInt)); | 1328 __ cmp(dividend, Operand(kMinInt)); |
| 1388 DeoptimizeIf(eq, instr->environment()); | 1329 DeoptimizeIf(eq, instr->environment()); |
| 1389 } | 1330 } |
| 1390 test_value = - divisor - 1; | 1331 test_value = - divisor - 1; |
| 1391 power = WhichPowerOf2(-divisor); | 1332 power = WhichPowerOf2(-divisor); |
| 1392 } | 1333 } |
| 1393 | 1334 |
| 1394 if (test_value != 0) { | 1335 if (test_value != 0) { |
| 1395 if (instr->hydrogen()->CheckFlag( | 1336 if (instr->hydrogen()->CheckFlag( |
| 1396 HInstruction::kAllUsesTruncatingToInt32)) { | 1337 HInstruction::kAllUsesTruncatingToInt32)) { |
| 1397 __ cmp(dividend, Operand(0)); | 1338 __ sub(result, dividend, Operand::Zero(), SetCC); |
| 1398 __ rsb(dividend, dividend, Operand(0), LeaveCC, lt); | 1339 __ rsb(result, result, Operand::Zero(), LeaveCC, lt); |
| 1399 __ mov(dividend, Operand(dividend, ASR, power)); | 1340 __ mov(result, Operand(result, ASR, power)); |
| 1400 if (divisor > 0) __ rsb(dividend, dividend, Operand(0), LeaveCC, lt); | 1341 if (divisor > 0) __ rsb(result, result, Operand::Zero(), LeaveCC, lt); |
| 1401 return; // Don't fall through to "__ rsb" below. | 1342 return; // Don't fall through to "__ rsb" below. |
| 1402 } else { | 1343 } else { |
| 1403 // Deoptimize if remainder is not 0. | 1344 // Deoptimize if remainder is not 0. |
| 1404 __ tst(dividend, Operand(test_value)); | 1345 __ tst(dividend, Operand(test_value)); |
| 1405 DeoptimizeIf(ne, instr->environment()); | 1346 DeoptimizeIf(ne, instr->environment()); |
| 1406 __ mov(dividend, Operand(dividend, ASR, power)); | 1347 __ mov(result, Operand(dividend, ASR, power)); |
| 1407 } | 1348 } |
| 1349 } else { |
| 1350 __ Move(result, dividend); |
| 1408 } | 1351 } |
| 1409 if (divisor < 0) __ rsb(dividend, dividend, Operand(0)); | 1352 |
| 1353 if (divisor < 0) __ rsb(result, result, Operand::Zero()); |
| 1410 | 1354 |
| 1411 return; | 1355 return; |
| 1412 } | 1356 } |
| 1413 | 1357 |
| 1414 const Register left = ToRegister(instr->left()); | 1358 const Register left = ToRegister(instr->left()); |
| 1415 const Register right = ToRegister(instr->right()); | 1359 const Register right = ToRegister(instr->right()); |
| 1416 const Register result = ToRegister(instr->result()); | 1360 const Register result = ToRegister(instr->result()); |
| 1417 | 1361 |
| 1418 // Check for x / 0. | 1362 // Check for x / 0. |
| 1419 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { | 1363 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { |
| (...skipping 145 matching lines...) |
| 1565 __ mls(remainder, result, right, left); | 1509 __ mls(remainder, result, right, left); |
| 1566 __ cmp(remainder, Operand::Zero()); | 1510 __ cmp(remainder, Operand::Zero()); |
| 1567 __ sub(result, result, Operand(1), LeaveCC, ne); | 1511 __ sub(result, result, Operand(1), LeaveCC, ne); |
| 1568 | 1512 |
| 1569 __ bind(&done); | 1513 __ bind(&done); |
| 1570 } | 1514 } |
| 1571 } | 1515 } |
| 1572 | 1516 |
| 1573 | 1517 |
| 1574 void LCodeGen::DoMulI(LMulI* instr) { | 1518 void LCodeGen::DoMulI(LMulI* instr) { |
| 1575 Register scratch = scratch0(); | |
| 1576 Register result = ToRegister(instr->result()); | 1519 Register result = ToRegister(instr->result()); |
| 1577 // Note that result may alias left. | 1520 // Note that result may alias left. |
| 1578 Register left = ToRegister(instr->left()); | 1521 Register left = ToRegister(instr->left()); |
| 1579 LOperand* right_op = instr->right(); | 1522 LOperand* right_op = instr->right(); |
| 1580 | 1523 |
| 1581 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | |
| 1582 bool bailout_on_minus_zero = | 1524 bool bailout_on_minus_zero = |
| 1583 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 1525 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
| 1584 | 1526 |
| 1585 if (right_op->IsConstantOperand() && !can_overflow) { | 1527 if (right_op->IsConstantOperand()) { |
| 1586 // Use optimized code for specific constants. | 1528 // Use optimized code for specific constants. |
| 1587 int32_t constant = ToRepresentation( | 1529 int32_t constant = ToRepresentation( |
| 1588 LConstantOperand::cast(right_op), | 1530 LConstantOperand::cast(right_op), |
| 1589 instr->hydrogen()->right()->representation()); | 1531 instr->hydrogen()->right()->representation()); |
| 1590 | 1532 |
| 1591 if (bailout_on_minus_zero && (constant < 0)) { | 1533 if (bailout_on_minus_zero && (constant < 0)) { |
| 1592 // The case of a null constant will be handled separately. | 1534 // The case of a null constant will be handled separately. |
| 1593 // If constant is negative and left is null, the result should be -0. | 1535 // If constant is negative and left is null, the result should be -0. |
| 1594 __ cmp(left, Operand::Zero()); | 1536 __ cmp(left, Operand::Zero()); |
| 1595 DeoptimizeIf(eq, instr->environment()); | 1537 DeoptimizeIf(eq, instr->environment()); |
| (...skipping 40 matching lines...) |
| 1636 if (constant < 0) __ rsb(result, result, Operand::Zero()); | 1578 if (constant < 0) __ rsb(result, result, Operand::Zero()); |
| 1637 | 1579 |
| 1638 } else { | 1580 } else { |
| 1639 // Generate standard code. | 1581 // Generate standard code. |
| 1640 __ mov(ip, Operand(constant)); | 1582 __ mov(ip, Operand(constant)); |
| 1641 __ mul(result, left, ip); | 1583 __ mul(result, left, ip); |
| 1642 } | 1584 } |
| 1643 } | 1585 } |
| 1644 | 1586 |
| 1645 } else { | 1587 } else { |
| 1646 Register right = EmitLoadRegister(right_op, scratch); | 1588 ASSERT(right_op->IsRegister()); |
| 1647 if (bailout_on_minus_zero) { | 1589 Register right = ToRegister(right_op); |
| 1648 __ orr(ToRegister(instr->temp()), left, right); | |
| 1649 } | |
| 1650 | 1590 |
| 1651 if (can_overflow) { | 1591 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 1592 Register scratch = scratch0(); |
| 1652 // scratch:result = left * right. | 1593 // scratch:result = left * right. |
| 1653 if (instr->hydrogen()->representation().IsSmi()) { | 1594 if (instr->hydrogen()->representation().IsSmi()) { |
| 1654 __ SmiUntag(result, left); | 1595 __ SmiUntag(result, left); |
| 1655 __ smull(result, scratch, result, right); | 1596 __ smull(result, scratch, result, right); |
| 1656 } else { | 1597 } else { |
| 1657 __ smull(result, scratch, left, right); | 1598 __ smull(result, scratch, left, right); |
| 1658 } | 1599 } |
| 1659 __ cmp(scratch, Operand(result, ASR, 31)); | 1600 __ cmp(scratch, Operand(result, ASR, 31)); |
| 1660 DeoptimizeIf(ne, instr->environment()); | 1601 DeoptimizeIf(ne, instr->environment()); |
| 1661 } else { | 1602 } else { |
| 1662 if (instr->hydrogen()->representation().IsSmi()) { | 1603 if (instr->hydrogen()->representation().IsSmi()) { |
| 1663 __ SmiUntag(result, left); | 1604 __ SmiUntag(result, left); |
| 1664 __ mul(result, result, right); | 1605 __ mul(result, result, right); |
| 1665 } else { | 1606 } else { |
| 1666 __ mul(result, left, right); | 1607 __ mul(result, left, right); |
| 1667 } | 1608 } |
| 1668 } | 1609 } |
| 1669 | 1610 |
| 1670 if (bailout_on_minus_zero) { | 1611 if (bailout_on_minus_zero) { |
| 1671 // Bail out if the result is supposed to be negative zero. | |
| 1672 Label done; | 1612 Label done; |
| 1613 __ teq(left, Operand(right)); |
| 1614 __ b(pl, &done); |
| 1615 // Bail out if the result is minus zero. |
| 1673 __ cmp(result, Operand::Zero()); | 1616 __ cmp(result, Operand::Zero()); |
| 1674 __ b(ne, &done); | 1617 DeoptimizeIf(eq, instr->environment()); |
| 1675 __ cmp(ToRegister(instr->temp()), Operand::Zero()); | |
| 1676 DeoptimizeIf(mi, instr->environment()); | |
| 1677 __ bind(&done); | 1618 __ bind(&done); |
| 1678 } | 1619 } |
| 1679 } | 1620 } |
| 1680 } | 1621 } |
| 1681 | 1622 |
| 1682 | 1623 |
| 1683 void LCodeGen::DoBitI(LBitI* instr) { | 1624 void LCodeGen::DoBitI(LBitI* instr) { |
| 1684 LOperand* left_op = instr->left(); | 1625 LOperand* left_op = instr->left(); |
| 1685 LOperand* right_op = instr->right(); | 1626 LOperand* right_op = instr->right(); |
| 1686 ASSERT(left_op->IsRegister()); | 1627 ASSERT(left_op->IsRegister()); |
| 1687 Register left = ToRegister(left_op); | 1628 Register left = ToRegister(left_op); |
| 1688 Register result = ToRegister(instr->result()); | 1629 Register result = ToRegister(instr->result()); |
| 1689 Operand right(no_reg); | |
| 1690 | 1630 |
| 1691 if (right_op->IsStackSlot() || right_op->IsArgument()) { | 1631 if (right_op->IsStackSlot()) { |
| 1692 right = Operand(EmitLoadRegister(right_op, ip)); | 1632 MemOperand right(ToMemOperand(right_op)); |
| 1633 switch (instr->op()) { |
| 1634 case Token::BIT_AND: |
| 1635 __ AndMemOperand(result, left, right); |
| 1636 break; |
| 1637 case Token::BIT_OR: |
| 1638 __ OrrMemOperand(result, left, right); |
| 1639 break; |
| 1640 case Token::BIT_XOR: |
| 1641 __ EorMemOperand(result, left, right); |
| 1642 break; |
| 1643 default: UNREACHABLE(); |
| 1644 } |
| 1693 } else { | 1645 } else { |
| 1694 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand()); | 1646 Operand right(ToOperand(right_op)); |
| 1695 right = ToOperand(right_op); | 1647 switch (instr->op()) { |
| 1696 } | 1648 case Token::BIT_AND: |
| 1697 | 1649 __ and_(result, left, right); |
| 1698 switch (instr->op()) { | 1650 break; |
| 1699 case Token::BIT_AND: | 1651 case Token::BIT_OR: |
| 1700 __ and_(result, left, right); | 1652 __ orr(result, left, right); |
| 1701 break; | 1653 break; |
| 1702 case Token::BIT_OR: | 1654 case Token::BIT_XOR: |
| 1703 __ orr(result, left, right); | 1655 if (right_op->IsConstantOperand() |
| 1704 break; | 1656 && right.immediate() == int32_t(~0)) { |
| 1705 case Token::BIT_XOR: | 1657 __ mvn(result, Operand(left)); |
| 1706 if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) { | 1658 } else { |
| 1707 __ mvn(result, Operand(left)); | 1659 __ eor(result, left, right); |
| 1708 } else { | 1660 } |
| 1709 __ eor(result, left, right); | 1661 break; |
| 1710 } | 1662 default: UNREACHABLE(); |
| 1711 break; | 1663 } |
| 1712 default: | |
| 1713 UNREACHABLE(); | |
| 1714 break; | |
| 1715 } | 1664 } |
| 1716 } | 1665 } |
| 1717 | 1666 |
| 1718 | 1667 |
| 1719 void LCodeGen::DoShiftI(LShiftI* instr) { | 1668 void LCodeGen::DoShiftI(LShiftI* instr) { |
| 1720 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so | 1669 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so |
| 1721 // result may alias either of them. | 1670 // result may alias either of them. |
| 1722 LOperand* right_op = instr->right(); | 1671 LOperand* right_op = instr->right(); |
| 1723 Register left = ToRegister(instr->left()); | 1672 Register left = ToRegister(instr->left()); |
| 1724 Register result = ToRegister(instr->result()); | 1673 Register result = ToRegister(instr->result()); |
| (...skipping 73 matching lines...) |
| 1798 break; | 1747 break; |
| 1799 default: | 1748 default: |
| 1800 UNREACHABLE(); | 1749 UNREACHABLE(); |
| 1801 break; | 1750 break; |
| 1802 } | 1751 } |
| 1803 } | 1752 } |
| 1804 } | 1753 } |
| 1805 | 1754 |
| 1806 | 1755 |
| 1807 void LCodeGen::DoSubI(LSubI* instr) { | 1756 void LCodeGen::DoSubI(LSubI* instr) { |
| 1808 LOperand* left = instr->left(); | 1757 Register left = ToRegister(instr->left()); |
| 1809 LOperand* right = instr->right(); | 1758 LOperand* right_op = instr->right(); |
| 1810 LOperand* result = instr->result(); | 1759 Register result = ToRegister(instr->result()); |
| 1811 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1760 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 1812 SBit set_cond = can_overflow ? SetCC : LeaveCC; | 1761 SBit set_cond = can_overflow ? SetCC : LeaveCC; |
| 1813 | 1762 |
| 1814 if (right->IsStackSlot() || right->IsArgument()) { | 1763 if (right_op->IsStackSlot()) { |
| 1815 Register right_reg = EmitLoadRegister(right, ip); | 1764 __ SubMemOperand(result, left, ToMemOperand(right_op), set_cond); |
| 1816 __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond); | |
| 1817 } else { | 1765 } else { |
| 1818 ASSERT(right->IsRegister() || right->IsConstantOperand()); | 1766 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand()); |
| 1819 __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond); | 1767 __ sub(result, left, ToOperand(right_op), set_cond); |
| 1820 } | 1768 } |
| 1821 | 1769 |
| 1822 if (can_overflow) { | 1770 if (can_overflow) { |
| 1823 DeoptimizeIf(vs, instr->environment()); | 1771 DeoptimizeIf(vs, instr->environment()); |
| 1824 } | 1772 } |
| 1825 } | 1773 } |
| 1826 | 1774 |
| 1827 | 1775 |
| 1828 void LCodeGen::DoRSubI(LRSubI* instr) { | 1776 void LCodeGen::DoRSubI(LRSubI* instr) { |
| 1829 LOperand* left = instr->left(); | 1777 Register left = ToRegister(instr->left()); |
| 1830 LOperand* right = instr->right(); | 1778 LOperand* right_op = instr->right(); |
| 1831 LOperand* result = instr->result(); | 1779 Register result = ToRegister(instr->result()); |
| 1832 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1780 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 1833 SBit set_cond = can_overflow ? SetCC : LeaveCC; | 1781 SBit set_cond = can_overflow ? SetCC : LeaveCC; |
| 1834 | 1782 |
| 1835 if (right->IsStackSlot() || right->IsArgument()) { | 1783 if (right_op->IsStackSlot()) { |
| 1836 Register right_reg = EmitLoadRegister(right, ip); | 1784 __ RsbMemOperand(result, left, ToMemOperand(right_op), set_cond); |
| 1837 __ rsb(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond); | |
| 1838 } else { | 1785 } else { |
| 1839 ASSERT(right->IsRegister() || right->IsConstantOperand()); | 1786 __ rsb(result, left, ToOperand(right_op), set_cond); |
| 1840 __ rsb(ToRegister(result), ToRegister(left), ToOperand(right), set_cond); | |
| 1841 } | 1787 } |
| 1842 | 1788 |
| 1843 if (can_overflow) { | 1789 if (can_overflow) { |
| 1844 DeoptimizeIf(vs, instr->environment()); | 1790 DeoptimizeIf(vs, instr->environment()); |
| 1845 } | 1791 } |
| 1846 } | 1792 } |
| 1847 | 1793 |
| 1848 | 1794 |
| 1849 void LCodeGen::DoConstantI(LConstantI* instr) { | 1795 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 1850 __ mov(ToRegister(instr->result()), Operand(instr->value())); | 1796 __ mov(ToRegister(instr->result()), Operand(instr->value())); |
| (...skipping 103 matching lines...) |
| 1954 __ PrepareCallCFunction(2, scratch); | 1900 __ PrepareCallCFunction(2, scratch); |
| 1955 __ mov(r1, Operand(index)); | 1901 __ mov(r1, Operand(index)); |
| 1956 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1902 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 1957 __ bind(&done); | 1903 __ bind(&done); |
| 1958 } | 1904 } |
| 1959 } | 1905 } |
| 1960 | 1906 |
| 1961 | 1907 |
| 1962 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { | 1908 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
| 1963 Register string = ToRegister(instr->string()); | 1909 Register string = ToRegister(instr->string()); |
| 1964 Register index = ToRegister(instr->index()); | |
| 1965 Register value = ToRegister(instr->value()); | 1910 Register value = ToRegister(instr->value()); |
| 1911 Register scratch = scratch0(); |
| 1912 LOperand* index_op = instr->index(); |
| 1966 String::Encoding encoding = instr->encoding(); | 1913 String::Encoding encoding = instr->encoding(); |
| 1967 | 1914 |
| 1968 if (FLAG_debug_code) { | 1915 if (FLAG_debug_code) { |
| 1969 __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset)); | 1916 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 1970 __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset)); | 1917 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 1971 | 1918 |
| 1972 __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask)); | 1919 __ and_(scratch, scratch, |
| 1920 Operand(kStringRepresentationMask | kStringEncodingMask)); |
| 1973 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1921 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1974 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1922 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1975 __ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING | 1923 __ cmp(scratch, Operand(encoding == String::ONE_BYTE_ENCODING |
| 1976 ? one_byte_seq_type : two_byte_seq_type)); | 1924 ? one_byte_seq_type : two_byte_seq_type)); |
| 1977 __ Check(eq, kUnexpectedStringType); | 1925 __ Check(eq, kUnexpectedStringType); |
| 1978 } | 1926 } |
| 1979 | 1927 |
| 1980 __ add(ip, | 1928 if (index_op->IsConstantOperand()) { |
| 1981 string, | 1929 int constant_index = ToInteger32(LConstantOperand::cast(index_op)); |
| 1982 Operand(SeqString::kHeaderSize - kHeapObjectTag)); | 1930 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1983 if (encoding == String::ONE_BYTE_ENCODING) { | 1931 __ strb(value, |
| 1984 __ strb(value, MemOperand(ip, index)); | 1932 FieldMemOperand(string, |
| 1933 SeqString::kHeaderSize + constant_index)); |
| 1934 } else { |
| 1935 __ strh(value, |
| 1936 FieldMemOperand(string, |
| 1937 SeqString::kHeaderSize + constant_index * 2)); |
| 1938 } |
| 1985 } else { | 1939 } else { |
| 1986 // MemOperand with ip as the base register is not allowed for strh, so | 1940 Register index = ToRegister(index_op); |
| 1987 // we do the address calculation explicitly. | 1941 if (encoding == String::ONE_BYTE_ENCODING) { |
| 1988 __ add(ip, ip, Operand(index, LSL, 1)); | 1942 __ add(scratch, string, Operand(index)); |
| 1989 __ strh(value, MemOperand(ip)); | 1943 __ strb(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); |
| 1944 } else { |
| 1945 __ add(scratch, string, Operand(index, LSL, 1)); |
| 1946 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); |
| 1947 } |
| 1990 } | 1948 } |
| 1991 } | 1949 } |
| 1992 | 1950 |
| 1993 | 1951 |
| 1994 void LCodeGen::DoThrow(LThrow* instr) { | 1952 void LCodeGen::DoThrow(LThrow* instr) { |
| 1995 Register input_reg = EmitLoadRegister(instr->value(), ip); | 1953 Register input_reg = ToRegister(instr->value()); |
| 1996 __ push(input_reg); | 1954 __ push(input_reg); |
| 1997 CallRuntime(Runtime::kThrow, 1, instr); | 1955 CallRuntime(Runtime::kThrow, 1, instr); |
| 1998 | 1956 |
| 1999 if (FLAG_debug_code) { | 1957 if (FLAG_debug_code) { |
| 2000 __ stop("Unreachable code."); | 1958 __ stop("Unreachable code."); |
| 2001 } | 1959 } |
| 2002 } | 1960 } |
| 2003 | 1961 |
| 2004 | 1962 |
| 2005 void LCodeGen::DoAddI(LAddI* instr) { | 1963 void LCodeGen::DoAddI(LAddI* instr) { |
| 2006 LOperand* left = instr->left(); | 1964 Register left = ToRegister(instr->left()); |
| 2007 LOperand* right = instr->right(); | 1965 LOperand* right_op = instr->right(); |
| 2008 LOperand* result = instr->result(); | 1966 Register result = ToRegister(instr->result()); |
| 2009 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1967 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 2010 SBit set_cond = can_overflow ? SetCC : LeaveCC; | 1968 SBit set_cond = can_overflow ? SetCC : LeaveCC; |
| 2011 | 1969 |
| 2012 if (right->IsStackSlot() || right->IsArgument()) { | 1970 if (right_op->IsStackSlot()) { |
| 2013 Register right_reg = EmitLoadRegister(right, ip); | 1971 __ AddMemOperand(result, left, ToMemOperand(right_op), set_cond); |
| 2014 __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond); | |
| 2015 } else { | 1972 } else { |
| 2016 ASSERT(right->IsRegister() || right->IsConstantOperand()); | 1973 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand()); |
| 2017 __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond); | 1974 __ add(result, left, ToOperand(right_op), set_cond); |
| 2018 } | 1975 } |
| 2019 | 1976 |
| 2020 if (can_overflow) { | 1977 if (can_overflow) { |
| 2021 DeoptimizeIf(vs, instr->environment()); | 1978 DeoptimizeIf(vs, instr->environment()); |
| 2022 } | 1979 } |
| 2023 } | 1980 } |
| 2024 | 1981 |
| 2025 | 1982 |
| 2026 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1983 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
| 2027 LOperand* left = instr->left(); | 1984 LOperand* left = instr->left(); |
| 2028 LOperand* right = instr->right(); | 1985 LOperand* right = instr->right(); |
| 2029 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1986 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
| 2030 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { | 1987 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { |
| 2031 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 1988 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; |
| 2032 Register left_reg = ToRegister(left); | 1989 Register left_reg = ToRegister(left); |
| 2033 Operand right_op = (right->IsRegister() || right->IsConstantOperand()) | 1990 Operand right_op = ToOperand(right); |
| 2034 ? ToOperand(right) | |
| 2035 : Operand(EmitLoadRegister(right, ip)); | |
| 2036 Register result_reg = ToRegister(instr->result()); | 1991 Register result_reg = ToRegister(instr->result()); |
| 2037 __ cmp(left_reg, right_op); | 1992 __ cmp(left_reg, right_op); |
| 2038 __ Move(result_reg, left_reg, condition); | 1993 __ Move(result_reg, left_reg, condition); |
| 2039 __ mov(result_reg, right_op, LeaveCC, NegateCondition(condition)); | 1994 __ mov(result_reg, right_op, LeaveCC, NegateCondition(condition)); |
| 2040 } else { | 1995 } else { |
| 2041 ASSERT(instr->hydrogen()->representation().IsDouble()); | 1996 ASSERT(instr->hydrogen()->representation().IsDouble()); |
| 2042 DwVfpRegister left_reg = ToDoubleRegister(left); | 1997 DwVfpRegister left_reg = ToDoubleRegister(left); |
| 2043 DwVfpRegister right_reg = ToDoubleRegister(right); | 1998 DwVfpRegister right_reg ; |
| 1999 if (right->IsDoubleStackSlot()) { |
| 2000 __ vldr(double_scratch0(), ToMemOperand(right)); |
| 2001 right_reg = double_scratch0(); |
| 2002 } else { |
| 2003 right_reg = ToDoubleRegister(right); |
| 2004 } |
| 2044 DwVfpRegister result_reg = ToDoubleRegister(instr->result()); | 2005 DwVfpRegister result_reg = ToDoubleRegister(instr->result()); |
| 2045 Label result_is_nan, return_left, return_right, check_zero, done; | 2006 Label result_is_nan, return_left, return_right, check_zero, done; |
| 2046 __ VFPCompareAndSetFlags(left_reg, right_reg); | 2007 __ VFPCompareAndSetFlags(left_reg, right_reg); |
| 2047 if (operation == HMathMinMax::kMathMin) { | 2008 if (operation == HMathMinMax::kMathMin) { |
| 2048 __ b(mi, &return_left); | 2009 __ b(mi, &return_left); |
| 2049 __ b(gt, &return_right); | 2010 __ b(gt, &return_right); |
| 2050 } else { | 2011 } else { |
| 2051 __ b(mi, &return_right); | 2012 __ b(mi, &return_right); |
| 2052 __ b(gt, &return_left); | 2013 __ b(gt, &return_left); |
| 2053 } | 2014 } |
| (...skipping 31 matching lines...) |
| 2085 __ bind(&return_left); | 2046 __ bind(&return_left); |
| 2086 __ Move(result_reg, left_reg); | 2047 __ Move(result_reg, left_reg); |
| 2087 | 2048 |
| 2088 __ bind(&done); | 2049 __ bind(&done); |
| 2089 } | 2050 } |
| 2090 } | 2051 } |
| 2091 | 2052 |
| 2092 | 2053 |
| 2093 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 2054 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
| 2094 DwVfpRegister left = ToDoubleRegister(instr->left()); | 2055 DwVfpRegister left = ToDoubleRegister(instr->left()); |
| 2095 DwVfpRegister right = ToDoubleRegister(instr->right()); | 2056 DwVfpRegister right; |
| 2057 if (instr->right()->IsDoubleStackSlot()) { |
| 2058 __ vldr(double_scratch0(), ToMemOperand(instr->right())); |
| 2059 right = double_scratch0(); |
| 2060 } else { |
| 2061 ASSERT(instr->right()->IsDoubleRegister()); |
| 2062 right = ToDoubleRegister(instr->right()); |
| 2063 } |
| 2096 DwVfpRegister result = ToDoubleRegister(instr->result()); | 2064 DwVfpRegister result = ToDoubleRegister(instr->result()); |
| 2097 switch (instr->op()) { | 2065 switch (instr->op()) { |
| 2098 case Token::ADD: | 2066 case Token::ADD: |
| 2099 __ vadd(result, left, right); | 2067 __ vadd(result, left, right); |
| 2100 break; | 2068 break; |
| 2101 case Token::SUB: | 2069 case Token::SUB: |
| 2102 __ vsub(result, left, right); | 2070 __ vsub(result, left, right); |
| 2103 break; | 2071 break; |
| 2104 case Token::MUL: | 2072 case Token::MUL: |
| 2105 __ vmul(result, left, right); | 2073 __ vmul(result, left, right); |
| (...skipping 407 matching lines...) |
| 2513 instr->hydrogen()->value()->IsHeapObject() | 2481 instr->hydrogen()->value()->IsHeapObject() |
| 2514 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2482 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 2515 Condition true_cond = | 2483 Condition true_cond = |
| 2516 EmitIsString(reg, temp1, instr->FalseLabel(chunk_), check_needed); | 2484 EmitIsString(reg, temp1, instr->FalseLabel(chunk_), check_needed); |
| 2517 | 2485 |
| 2518 EmitBranch(instr, true_cond); | 2486 EmitBranch(instr, true_cond); |
| 2519 } | 2487 } |
| 2520 | 2488 |
| 2521 | 2489 |
| 2522 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { | 2490 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { |
| 2523 Register input_reg = EmitLoadRegister(instr->value(), ip); | 2491 __ SmiTst(ToRegister(instr->value())); |
| 2524 __ SmiTst(input_reg); | |
| 2525 EmitBranch(instr, eq); | 2492 EmitBranch(instr, eq); |
| 2526 } | 2493 } |
| 2527 | 2494 |
| 2528 | 2495 |
| 2529 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { | 2496 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { |
| 2530 Register input = ToRegister(instr->value()); | 2497 Register input = ToRegister(instr->value()); |
| 2531 Register temp = ToRegister(instr->temp()); | 2498 Register temp = ToRegister(instr->temp()); |
| 2532 | 2499 |
| 2533 if (!instr->hydrogen()->value()->IsHeapObject()) { | 2500 if (!instr->hydrogen()->value()->IsHeapObject()) { |
| 2534 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); | 2501 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); |
| (...skipping 604 matching lines...) |
| 3139 Register to_reg = ToRegister(instr->result()); | 3106 Register to_reg = ToRegister(instr->result()); |
| 3140 Register from_reg = ToRegister(instr->object()); | 3107 Register from_reg = ToRegister(instr->object()); |
| 3141 __ ldr(to_reg, FieldMemOperand(from_reg, | 3108 __ ldr(to_reg, FieldMemOperand(from_reg, |
| 3142 ExternalArray::kExternalPointerOffset)); | 3109 ExternalArray::kExternalPointerOffset)); |
| 3143 } | 3110 } |
| 3144 | 3111 |
| 3145 | 3112 |
| 3146 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 3113 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 3147 Register arguments = ToRegister(instr->arguments()); | 3114 Register arguments = ToRegister(instr->arguments()); |
| 3148 Register result = ToRegister(instr->result()); | 3115 Register result = ToRegister(instr->result()); |
| 3149 if (instr->length()->IsConstantOperand() && | 3116 // There are two words between the frame pointer and the last argument. |
| 3150 instr->index()->IsConstantOperand()) { | 3117 // Subtracting from length accounts for one of them add one more. |
| 3118 if (instr->length()->IsConstantOperand()) { |
| 3119 int const_length = ToInteger32(LConstantOperand::cast(instr->length())); |
| 3120 if (instr->index()->IsConstantOperand()) { |
| 3121 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 3122 int index = (const_length - const_index) + 1; |
| 3123 __ ldr(result, MemOperand(arguments, index * kPointerSize)); |
| 3124 } else { |
| 3125 Register index = ToRegister(instr->index()); |
| 3126 __ rsb(result, index, Operand(const_length + 1)); |
| 3127 __ ldr(result, MemOperand(arguments, result, LSL, kPointerSizeLog2)); |
| 3128 } |
| 3129 } else if (instr->index()->IsConstantOperand()) { |
| 3130 Register length = ToRegister(instr->length()); |
| 3151 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 3131 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 3152 int const_length = ToInteger32(LConstantOperand::cast(instr->length())); | 3132 int loc = const_index - 1; |
| 3153 int index = (const_length - const_index) + 1; | 3133 if (loc != 0) { |
| 3154 __ ldr(result, MemOperand(arguments, index * kPointerSize)); | 3134 __ sub(result, length, Operand(loc)); |
| 3135 __ ldr(result, MemOperand(arguments, result, LSL, kPointerSizeLog2)); |
| 3136 } else { |
| 3137 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2)); |
| 3138 } |
| 3155 } else { | 3139 } else { |
| 3156 Register length = ToRegister(instr->length()); | 3140 Register length = ToRegister(instr->length()); |
| 3157 Register index = ToRegister(instr->index()); | 3141 Register index = ToRegister(instr->index()); |
| 3158 // There are two words between the frame pointer and the last argument. | 3142 __ sub(result, length, index); |
| 3159 // Subtracting from length accounts for one of them add one more. | 3143 __ add(result, result, Operand(1)); |
| 3160 __ sub(length, length, index); | 3144 __ ldr(result, MemOperand(arguments, result, LSL, kPointerSizeLog2)); |
| 3161 __ add(length, length, Operand(1)); | |
| 3162 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2)); | |
| 3163 } | 3145 } |
| 3164 } | 3146 } |
| 3165 | 3147 |
| 3166 | 3148 |
| 3167 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { | 3149 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| 3168 Register external_pointer = ToRegister(instr->elements()); | 3150 Register external_pointer = ToRegister(instr->elements()); |
| 3169 Register key = no_reg; | 3151 Register key = no_reg; |
| 3170 ElementsKind elements_kind = instr->elements_kind(); | 3152 ElementsKind elements_kind = instr->elements_kind(); |
| 3171 bool key_is_constant = instr->key()->IsConstantOperand(); | 3153 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 3172 int constant_key = 0; | 3154 int constant_key = 0; |
| (...skipping 73 matching lines...) |
| 3246 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { | 3228 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
| 3247 Register elements = ToRegister(instr->elements()); | 3229 Register elements = ToRegister(instr->elements()); |
| 3248 bool key_is_constant = instr->key()->IsConstantOperand(); | 3230 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 3249 Register key = no_reg; | 3231 Register key = no_reg; |
| 3250 DwVfpRegister result = ToDoubleRegister(instr->result()); | 3232 DwVfpRegister result = ToDoubleRegister(instr->result()); |
| 3251 Register scratch = scratch0(); | 3233 Register scratch = scratch0(); |
| 3252 | 3234 |
| 3253 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 3235 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 3254 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 3236 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 3255 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 3237 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 3256 int constant_key = 0; | |
| 3257 if (key_is_constant) { | 3238 if (key_is_constant) { |
| 3258 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 3239 int constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 3259 if (constant_key & 0xF0000000) { | 3240 if (constant_key & 0xF0000000) { |
| 3260 Abort(kArrayIndexConstantValueTooBig); | 3241 Abort(kArrayIndexConstantValueTooBig); |
| 3261 } | 3242 } |
| 3243 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + |
| 3244 ((constant_key + instr->additional_index()) << element_size_shift); |
| 3245 __ add(scratch, elements, Operand(base_offset)); |
| 3262 } else { | 3246 } else { |
| 3247 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + |
| 3248 ((instr->additional_index()) << element_size_shift); |
| 3263 key = ToRegister(instr->key()); | 3249 key = ToRegister(instr->key()); |
| 3250 __ add(scratch, elements, Operand(base_offset)); |
| 3251 __ add(scratch, scratch, Operand(key, LSL, shift_size)); |
| 3264 } | 3252 } |
| 3265 | 3253 |
| 3266 int base_offset = (FixedDoubleArray::kHeaderSize - kHeapObjectTag) + | 3254 __ vldr(result, scratch, 0); |
| 3267 ((constant_key + instr->additional_index()) << element_size_shift); | |
| 3268 if (!key_is_constant) { | |
| 3269 __ add(elements, elements, Operand(key, LSL, shift_size)); | |
| 3270 } | |
| 3271 __ add(elements, elements, Operand(base_offset)); | |
| 3272 __ vldr(result, elements, 0); | |
| 3273 if (instr->hydrogen()->RequiresHoleCheck()) { | 3255 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 3274 __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32))); | 3256 __ ldr(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); |
| 3275 __ cmp(scratch, Operand(kHoleNanUpper32)); | 3257 __ cmp(scratch, Operand(kHoleNanUpper32)); |
| 3276 DeoptimizeIf(eq, instr->environment()); | 3258 DeoptimizeIf(eq, instr->environment()); |
| 3277 } | 3259 } |
| 3278 } | 3260 } |
| 3279 | 3261 |
| 3280 | 3262 |
| 3281 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3263 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
| 3282 Register elements = ToRegister(instr->elements()); | 3264 Register elements = ToRegister(instr->elements()); |
| 3283 Register result = ToRegister(instr->result()); | 3265 Register result = ToRegister(instr->result()); |
| 3284 Register scratch = scratch0(); | 3266 Register scratch = scratch0(); |
| 3285 Register store_base = scratch; | 3267 Register store_base = scratch; |
| 3286 int offset = 0; | 3268 int offset = 0; |
| 3287 | 3269 |
| 3288 if (instr->key()->IsConstantOperand()) { | 3270 if (instr->key()->IsConstantOperand()) { |
| 3289 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 3271 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 3290 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + | 3272 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + |
| 3291 instr->additional_index()); | 3273 instr->additional_index()); |
| 3292 store_base = elements; | 3274 store_base = elements; |
| 3293 } else { | 3275 } else { |
| 3294 Register key = EmitLoadRegister(instr->key(), scratch0()); | 3276 Register key = ToRegister(instr->key()); |
| 3295 // Even though the HLoadKeyed instruction forces the input | 3277 // Even though the HLoadKeyed instruction forces the input |
| 3296 // representation for the key to be an integer, the input gets replaced | 3278 // representation for the key to be an integer, the input gets replaced |
| 3297 // during bound check elimination with the index argument to the bounds | 3279 // during bound check elimination with the index argument to the bounds |
| 3298 // check, which can be tagged, so that case must be handled here, too. | 3280 // check, which can be tagged, so that case must be handled here, too. |
| 3299 if (instr->hydrogen()->key()->representation().IsSmi()) { | 3281 if (instr->hydrogen()->key()->representation().IsSmi()) { |
| 3300 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key)); | 3282 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key)); |
| 3301 } else { | 3283 } else { |
| 3302 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); | 3284 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); |
| 3303 } | 3285 } |
| 3304 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); | 3286 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); |
| (...skipping 204 matching lines...) |
| 3509 // The number of arguments is stored in receiver which is r0, as expected | 3491 // The number of arguments is stored in receiver which is r0, as expected |
| 3510 // by InvokeFunction. | 3492 // by InvokeFunction. |
| 3511 ParameterCount actual(receiver); | 3493 ParameterCount actual(receiver); |
| 3512 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3494 __ InvokeFunction(function, actual, CALL_FUNCTION, |
| 3513 safepoint_generator, CALL_AS_METHOD); | 3495 safepoint_generator, CALL_AS_METHOD); |
| 3514 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3496 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3515 } | 3497 } |
| 3516 | 3498 |
| 3517 | 3499 |
| 3518 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3500 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 3519 LOperand* argument = instr->value(); | 3501 ASSERT(instr->value()->IsRegister()); |
| 3520 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 3502 __ push(ToRegister(instr->value())); |
| 3521 Abort(kDoPushArgumentNotImplementedForDoubleType); | |
| 3522 } else { | |
| 3523 Register argument_reg = EmitLoadRegister(argument, ip); | |
| 3524 __ push(argument_reg); | |
| 3525 } | |
| 3526 } | 3503 } |
| 3527 | 3504 |
| 3528 | 3505 |
| 3529 void LCodeGen::DoDrop(LDrop* instr) { | 3506 void LCodeGen::DoDrop(LDrop* instr) { |
| 3530 __ Drop(instr->count()); | 3507 __ Drop(instr->count()); |
| 3531 } | 3508 } |
| 3532 | 3509 |
| 3533 | 3510 |
| 3534 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 3511 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 3535 Register result = ToRegister(instr->result()); | 3512 Register result = ToRegister(instr->result()); |
| (...skipping 418 matching lines...) |
| 3954 } | 3931 } |
| 3955 | 3932 |
| 3956 | 3933 |
| 3957 void LCodeGen::DoMathExp(LMathExp* instr) { | 3934 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3958 DwVfpRegister input = ToDoubleRegister(instr->value()); | 3935 DwVfpRegister input = ToDoubleRegister(instr->value()); |
| 3959 DwVfpRegister result = ToDoubleRegister(instr->result()); | 3936 DwVfpRegister result = ToDoubleRegister(instr->result()); |
| 3960 DwVfpRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); | 3937 DwVfpRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); |
| 3961 DwVfpRegister double_scratch2 = double_scratch0(); | 3938 DwVfpRegister double_scratch2 = double_scratch0(); |
| 3962 Register temp1 = ToRegister(instr->temp1()); | 3939 Register temp1 = ToRegister(instr->temp1()); |
| 3963 Register temp2 = ToRegister(instr->temp2()); | 3940 Register temp2 = ToRegister(instr->temp2()); |
| 3941 Register temp3 = scratch0(); |
| 3964 | 3942 |
| 3965 MathExpGenerator::EmitMathExp( | 3943 MathExpGenerator::EmitMathExp( |
| 3966 masm(), input, result, double_scratch1, double_scratch2, | 3944 masm(), input, result, double_scratch1, double_scratch2, |
| 3967 temp1, temp2, scratch0()); | 3945 temp1, temp2, temp3); |
| 3968 } | 3946 } |
| 3969 | 3947 |
| 3970 | 3948 |
| 3971 void LCodeGen::DoMathLog(LMathLog* instr) { | 3949 void LCodeGen::DoMathLog(LMathLog* instr) { |
| 3972 ASSERT(ToDoubleRegister(instr->result()).is(d2)); | 3950 ASSERT(ToDoubleRegister(instr->result()).is(d2)); |
| 3973 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 3951 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
| 3974 TranscendentalCacheStub::UNTAGGED); | 3952 TranscendentalCacheStub::UNTAGGED); |
| 3975 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3953 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3976 } | 3954 } |
| 3977 | 3955 |
| (...skipping 319 matching lines...) |
| 4297 } else { | 4275 } else { |
| 4298 key = ToRegister(instr->key()); | 4276 key = ToRegister(instr->key()); |
| 4299 } | 4277 } |
| 4300 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 4278 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
| 4301 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4279 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4302 ? (element_size_shift - kSmiTagSize) : element_size_shift; | 4280 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4303 int additional_offset = instr->additional_index() << element_size_shift; | 4281 int additional_offset = instr->additional_index() << element_size_shift; |
| 4304 | 4282 |
| 4305 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 4283 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
| 4306 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { | 4284 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { |
| 4285 Register address = scratch0(); |
| 4307 DwVfpRegister value(ToDoubleRegister(instr->value())); | 4286 DwVfpRegister value(ToDoubleRegister(instr->value())); |
| 4308 Operand operand(key_is_constant | 4287 if (key_is_constant) { |
| 4309 ? Operand(constant_key << element_size_shift) | 4288 if (constant_key != 0) { |
| 4310 : Operand(key, LSL, shift_size)); | 4289 __ add(address, external_pointer, |
| 4311 __ add(scratch0(), external_pointer, operand); | 4290 Operand(constant_key << element_size_shift)) ; |
| 4291 } else { |
| 4292 address = external_pointer; |
| 4293 } |
| 4294 } else { |
| 4295 __ add(address, external_pointer, Operand(key, LSL, shift_size)); |
| 4296 } |
| 4312 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { | 4297 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { |
| 4313 __ vcvt_f32_f64(double_scratch0().low(), value); | 4298 __ vcvt_f32_f64(double_scratch0().low(), value); |
| 4314 __ vstr(double_scratch0().low(), scratch0(), additional_offset); | 4299 __ vstr(double_scratch0().low(), address, additional_offset); |
| 4315 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS | 4300 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS |
| 4316 __ vstr(value, scratch0(), additional_offset); | 4301 __ vstr(value, address, additional_offset); |
| 4317 } | 4302 } |
| 4318 } else { | 4303 } else { |
| 4319 Register value(ToRegister(instr->value())); | 4304 Register value(ToRegister(instr->value())); |
| 4320 MemOperand mem_operand = PrepareKeyedOperand( | 4305 MemOperand mem_operand = PrepareKeyedOperand( |
| 4321 key, external_pointer, key_is_constant, constant_key, | 4306 key, external_pointer, key_is_constant, constant_key, |
| 4322 element_size_shift, shift_size, | 4307 element_size_shift, shift_size, |
| 4323 instr->additional_index(), additional_offset); | 4308 instr->additional_index(), additional_offset); |
| 4324 switch (elements_kind) { | 4309 switch (elements_kind) { |
| 4325 case EXTERNAL_PIXEL_ELEMENTS: | 4310 case EXTERNAL_PIXEL_ELEMENTS: |
| 4326 case EXTERNAL_BYTE_ELEMENTS: | 4311 case EXTERNAL_BYTE_ELEMENTS: |
| (...skipping 21 matching lines...) |
| 4348 UNREACHABLE(); | 4333 UNREACHABLE(); |
| 4349 break; | 4334 break; |
| 4350 } | 4335 } |
| 4351 } | 4336 } |
| 4352 } | 4337 } |
| 4353 | 4338 |
| 4354 | 4339 |
| 4355 void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { | 4340 void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) { |
| 4356 DwVfpRegister value = ToDoubleRegister(instr->value()); | 4341 DwVfpRegister value = ToDoubleRegister(instr->value()); |
| 4357 Register elements = ToRegister(instr->elements()); | 4342 Register elements = ToRegister(instr->elements()); |
| 4358 Register key = no_reg; | |
| 4359 Register scratch = scratch0(); | 4343 Register scratch = scratch0(); |
| 4344 DwVfpRegister double_scratch = double_scratch0(); |
| 4360 bool key_is_constant = instr->key()->IsConstantOperand(); | 4345 bool key_is_constant = instr->key()->IsConstantOperand(); |
| 4361 int constant_key = 0; | |
| 4362 | 4346 |
| 4363 // Calculate the effective address of the slot in the array to store the | 4347 // Calculate the effective address of the slot in the array to store the |
| 4364 // double value. | 4348 // double value. |
| 4349 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); |
| 4365 if (key_is_constant) { | 4350 if (key_is_constant) { |
| 4366 constant_key = ToInteger32(LConstantOperand::cast(instr->key())); | 4351 int constant_key = ToInteger32(LConstantOperand::cast(instr->key())); |
| 4367 if (constant_key & 0xF0000000) { | 4352 if (constant_key & 0xF0000000) { |
| 4368 Abort(kArrayIndexConstantValueTooBig); | 4353 Abort(kArrayIndexConstantValueTooBig); |
| 4369 } | 4354 } |
| 4355 __ add(scratch, elements, |
| 4356 Operand((constant_key << element_size_shift) + |
| 4357 FixedDoubleArray::kHeaderSize - kHeapObjectTag)); |
| 4370 } else { | 4358 } else { |
| 4371 key = ToRegister(instr->key()); | 4359 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) |
| 4372 } | 4360 ? (element_size_shift - kSmiTagSize) : element_size_shift; |
| 4373 int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS); | 4361 __ add(scratch, elements, |
| 4374 int shift_size = (instr->hydrogen()->key()->representation().IsSmi()) | 4362 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); |
| 4375 ? (element_size_shift - kSmiTagSize) : element_size_shift; | |
| 4376 Operand operand = key_is_constant | |
| 4377 ? Operand((constant_key << element_size_shift) + | |
| 4378 FixedDoubleArray::kHeaderSize - kHeapObjectTag) | |
| 4379 : Operand(key, LSL, shift_size); | |
| 4380 __ add(scratch, elements, operand); | |
| 4381 if (!key_is_constant) { | |
| 4382 __ add(scratch, scratch, | 4363 __ add(scratch, scratch, |
| 4383 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); | 4364 Operand(ToRegister(instr->key()), LSL, shift_size)); |
| 4384 } | 4365 } |
| 4385 | 4366 |
| 4386 if (instr->NeedsCanonicalization()) { | 4367 if (instr->NeedsCanonicalization()) { |
| 4387 // Force a canonical NaN. | 4368 // Force a canonical NaN. |
| 4388 if (masm()->emit_debug_code()) { | 4369 if (masm()->emit_debug_code()) { |
| 4389 __ vmrs(ip); | 4370 __ vmrs(ip); |
| 4390 __ tst(ip, Operand(kVFPDefaultNaNModeControlBit)); | 4371 __ tst(ip, Operand(kVFPDefaultNaNModeControlBit)); |
| 4391 __ Assert(ne, kDefaultNaNModeNotSet); | 4372 __ Assert(ne, kDefaultNaNModeNotSet); |
| 4392 } | 4373 } |
| 4393 __ VFPCanonicalizeNaN(value); | 4374 __ VFPCanonicalizeNaN(double_scratch, value); |
| 4375 __ vstr(double_scratch, scratch, |
| 4376 instr->additional_index() << element_size_shift); |
| 4377 } else { |
| 4378 __ vstr(value, scratch, |
| 4379 instr->additional_index() << element_size_shift); |
| 4394 } | 4380 } |
| 4395 __ vstr(value, scratch, instr->additional_index() << element_size_shift); | |
| 4396 } | 4381 } |
| 4397 | 4382 |
| 4398 | 4383 |
| 4399 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { | 4384 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { |
| 4400 Register value = ToRegister(instr->value()); | 4385 Register value = ToRegister(instr->value()); |
| 4401 Register elements = ToRegister(instr->elements()); | 4386 Register elements = ToRegister(instr->elements()); |
| 4402 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) | 4387 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) |
| 4403 : no_reg; | 4388 : no_reg; |
| 4404 Register scratch = scratch0(); | 4389 Register scratch = scratch0(); |
| 4405 Register store_base = scratch; | 4390 Register store_base = scratch; |
| (...skipping 208 matching lines...) |
| 4614 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4599 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 4615 __ SmiTag(char_code); | 4600 __ SmiTag(char_code); |
| 4616 __ push(char_code); | 4601 __ push(char_code); |
| 4617 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); | 4602 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); |
| 4618 __ StoreToSafepointRegisterSlot(r0, result); | 4603 __ StoreToSafepointRegisterSlot(r0, result); |
| 4619 } | 4604 } |
| 4620 | 4605 |
| 4621 | 4606 |
| 4622 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4607 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 4623 LOperand* input = instr->value(); | 4608 LOperand* input = instr->value(); |
| 4624 ASSERT(input->IsRegister() || input->IsStackSlot()); | |
| 4625 LOperand* output = instr->result(); | 4609 LOperand* output = instr->result(); |
| 4610 ASSERT(input->IsRegister()); |
| 4626 ASSERT(output->IsDoubleRegister()); | 4611 ASSERT(output->IsDoubleRegister()); |
| 4627 SwVfpRegister single_scratch = double_scratch0().low(); | 4612 SwVfpRegister single_scratch = double_scratch0().low(); |
| 4628 if (input->IsStackSlot()) { | 4613 __ vmov(single_scratch, ToRegister(input)); |
| 4629 Register scratch = scratch0(); | |
| 4630 __ ldr(scratch, ToMemOperand(input)); | |
| 4631 __ vmov(single_scratch, scratch); | |
| 4632 } else { | |
| 4633 __ vmov(single_scratch, ToRegister(input)); | |
| 4634 } | |
| 4635 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch); | 4614 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch); |
| 4636 } | 4615 } |
| 4637 | 4616 |
| 4638 | 4617 |
| 4639 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { | 4618 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { |
| 4640 LOperand* input = instr->value(); | 4619 LOperand* input = instr->value(); |
| 4641 ASSERT(input->IsRegister()); | 4620 ASSERT(input->IsRegister()); |
| 4642 LOperand* output = instr->result(); | 4621 LOperand* output = instr->result(); |
| 4643 ASSERT(output->IsRegister()); | 4622 ASSERT(output->IsRegister()); |
| 4644 __ SmiTag(ToRegister(output), ToRegister(input), SetCC); | 4623 __ SmiTag(ToRegister(output), ToRegister(input), SetCC); |
| (...skipping 661 matching lines...) | |
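The DoInteger32ToDouble change above drops the stack-slot path, so the input is always a core register that is moved into a VFP register (vmov) and converted (vcvt.f64.s32). As a plain C++ sketch of the conversion semantics only (not V8 code):

    #include <cstdint>
    #include <iostream>

    // vcvt.f64.s32 on a signed 32-bit input is an exact conversion: every
    // int32_t is representable as a double.
    double Integer32ToDouble(int32_t value) {
      return static_cast<double>(value);
    }

    int main() {
      std::cout << Integer32ToDouble(-7) << "\n";         // prints -7
      std::cout << Integer32ToDouble(INT32_MIN) << "\n";  // prints -2.14748e+09 with default formatting
      return 0;
    }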
| 5306 scratch2, | 5285 scratch2, |
| 5307 deferred->entry(), | 5286 deferred->entry(), |
| 5308 flags); | 5287 flags); |
| 5309 } | 5288 } |
| 5310 | 5289 |
| 5311 __ bind(deferred->exit()); | 5290 __ bind(deferred->exit()); |
| 5312 | 5291 |
| 5313 if (instr->hydrogen()->MustPrefillWithFiller()) { | 5292 if (instr->hydrogen()->MustPrefillWithFiller()) { |
| 5314 if (instr->size()->IsConstantOperand()) { | 5293 if (instr->size()->IsConstantOperand()) { |
| 5315 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5294 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5316 __ mov(scratch, Operand(size)); | 5295 __ mov(scratch, Operand(size - kPointerSize)); |
| 5317 } else { | 5296 } else { |
| 5318 scratch = ToRegister(instr->size()); | 5297 Register size = ToRegister(instr->size()); |
| 5298 __ sub(scratch, size, Operand(kPointerSize)); |
| 5319 } | 5299 } |
| 5320 __ sub(scratch, scratch, Operand(kPointerSize)); | |
| 5321 __ sub(result, result, Operand(kHeapObjectTag)); | 5300 __ sub(result, result, Operand(kHeapObjectTag)); |
| 5322 Label loop; | 5301 Label loop; |
| 5323 __ bind(&loop); | 5302 __ bind(&loop); |
| 5324 __ mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 5303 __ mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
| 5325 __ str(scratch2, MemOperand(result, scratch)); | 5304 __ str(scratch2, MemOperand(result, scratch)); |
| 5326 __ sub(scratch, scratch, Operand(kPointerSize)); | 5305 __ sub(scratch, scratch, Operand(kPointerSize)); |
| 5327 __ cmp(scratch, Operand(0)); | 5306 __ cmp(scratch, Operand(0)); |
| 5328 __ b(ge, &loop); | 5307 __ b(ge, &loop); |
| 5329 __ add(result, result, Operand(kHeapObjectTag)); | 5308 __ add(result, result, Operand(kHeapObjectTag)); |
| 5330 } | 5309 } |
| (...skipping 124 matching lines...) | |
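The MustPrefillWithFiller block above now computes size - kPointerSize up front (folded into the immediate, or with a single sub from the size register) and then walks the allocation from its last word down to offset 0, storing the one-pointer filler map at each word. A rough standalone C++ model of that loop (not V8 code; the filler constant is only an illustrative marker):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    constexpr ptrdiff_t kPointerSize = static_cast<ptrdiff_t>(sizeof(void*));
    constexpr uintptr_t kOnePointerFillerMarker = 0xF111E5;  // illustrative only

    void PrefillWithFiller(uint8_t* allocation, ptrdiff_t size_in_bytes) {
      // scratch starts at size - kPointerSize and steps down while >= 0,
      // mirroring the sub/cmp/b ge loop above.
      for (ptrdiff_t offset = size_in_bytes - kPointerSize; offset >= 0;
           offset -= kPointerSize) {
        std::memcpy(allocation + offset, &kOnePointerFillerMarker,
                    sizeof(kOnePointerFillerMarker));
      }
    }

    int main() {
      std::vector<uint8_t> block(8 * kPointerSize);
      PrefillWithFiller(block.data(), static_cast<ptrdiff_t>(block.size()));
      return 0;
    }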
| 5455 | 5434 |
| 5456 | 5435 |
| 5457 Condition LCodeGen::EmitTypeofIs(Label* true_label, | 5436 Condition LCodeGen::EmitTypeofIs(Label* true_label, |
| 5458 Label* false_label, | 5437 Label* false_label, |
| 5459 Register input, | 5438 Register input, |
| 5460 Handle<String> type_name) { | 5439 Handle<String> type_name) { |
| 5461 Condition final_branch_condition = kNoCondition; | 5440 Condition final_branch_condition = kNoCondition; |
| 5462 Register scratch = scratch0(); | 5441 Register scratch = scratch0(); |
| 5463 if (type_name->Equals(heap()->number_string())) { | 5442 if (type_name->Equals(heap()->number_string())) { |
| 5464 __ JumpIfSmi(input, true_label); | 5443 __ JumpIfSmi(input, true_label); |
| 5465 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); | 5444 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); |
| 5466 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 5445 __ CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); |
| 5467 __ cmp(input, Operand(ip)); | |
| 5468 final_branch_condition = eq; | 5446 final_branch_condition = eq; |
| 5469 | 5447 |
| 5470 } else if (type_name->Equals(heap()->string_string())) { | 5448 } else if (type_name->Equals(heap()->string_string())) { |
| 5471 __ JumpIfSmi(input, false_label); | 5449 __ JumpIfSmi(input, false_label); |
| 5472 __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE); | 5450 __ CompareObjectType(input, scratch, no_reg, FIRST_NONSTRING_TYPE); |
| 5473 __ b(ge, false_label); | 5451 __ b(ge, false_label); |
| 5474 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 5452 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); |
| 5475 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 5453 __ tst(scratch, Operand(1 << Map::kIsUndetectable)); |
| 5476 final_branch_condition = eq; | 5454 final_branch_condition = eq; |
| 5477 | 5455 |
| 5478 } else if (type_name->Equals(heap()->symbol_string())) { | 5456 } else if (type_name->Equals(heap()->symbol_string())) { |
| 5479 __ JumpIfSmi(input, false_label); | 5457 __ JumpIfSmi(input, false_label); |
| 5480 __ CompareObjectType(input, input, scratch, SYMBOL_TYPE); | 5458 __ CompareObjectType(input, scratch, no_reg, SYMBOL_TYPE); |
| 5481 final_branch_condition = eq; | 5459 final_branch_condition = eq; |
| 5482 | 5460 |
| 5483 } else if (type_name->Equals(heap()->boolean_string())) { | 5461 } else if (type_name->Equals(heap()->boolean_string())) { |
| 5484 __ CompareRoot(input, Heap::kTrueValueRootIndex); | 5462 __ CompareRoot(input, Heap::kTrueValueRootIndex); |
| 5485 __ b(eq, true_label); | 5463 __ b(eq, true_label); |
| 5486 __ CompareRoot(input, Heap::kFalseValueRootIndex); | 5464 __ CompareRoot(input, Heap::kFalseValueRootIndex); |
| 5487 final_branch_condition = eq; | 5465 final_branch_condition = eq; |
| 5488 | 5466 |
| 5489 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { | 5467 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { |
| 5490 __ CompareRoot(input, Heap::kNullValueRootIndex); | 5468 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 5491 final_branch_condition = eq; | 5469 final_branch_condition = eq; |
| 5492 | 5470 |
| 5493 } else if (type_name->Equals(heap()->undefined_string())) { | 5471 } else if (type_name->Equals(heap()->undefined_string())) { |
| 5494 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); | 5472 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); |
| 5495 __ b(eq, true_label); | 5473 __ b(eq, true_label); |
| 5496 __ JumpIfSmi(input, false_label); | 5474 __ JumpIfSmi(input, false_label); |
| 5497 // Check for undetectable objects => true. | 5475 // Check for undetectable objects => true. |
| 5498 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); | 5476 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); |
| 5499 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 5477 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); |
| 5500 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 5478 __ tst(scratch, Operand(1 << Map::kIsUndetectable)); |
| 5501 final_branch_condition = ne; | 5479 final_branch_condition = ne; |
| 5502 | 5480 |
| 5503 } else if (type_name->Equals(heap()->function_string())) { | 5481 } else if (type_name->Equals(heap()->function_string())) { |
| 5504 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 5482 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
| 5483 Register type_reg = scratch; |
| 5505 __ JumpIfSmi(input, false_label); | 5484 __ JumpIfSmi(input, false_label); |
| 5506 __ CompareObjectType(input, scratch, input, JS_FUNCTION_TYPE); | 5485 __ CompareObjectType(input, scratch, type_reg, JS_FUNCTION_TYPE); |
| 5507 __ b(eq, true_label); | 5486 __ b(eq, true_label); |
| 5508 __ cmp(input, Operand(JS_FUNCTION_PROXY_TYPE)); | 5487 __ cmp(type_reg, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 5509 final_branch_condition = eq; | 5488 final_branch_condition = eq; |
| 5510 | 5489 |
| 5511 } else if (type_name->Equals(heap()->object_string())) { | 5490 } else if (type_name->Equals(heap()->object_string())) { |
| 5512 __ JumpIfSmi(input, false_label); | 5491 __ JumpIfSmi(input, false_label); |
| 5513 if (!FLAG_harmony_typeof) { | 5492 if (!FLAG_harmony_typeof) { |
| 5514 __ CompareRoot(input, Heap::kNullValueRootIndex); | 5493 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 5515 __ b(eq, true_label); | 5494 __ b(eq, true_label); |
| 5516 } | 5495 } |
| 5517 __ CompareObjectType(input, input, scratch, | 5496 __ CheckObjectTypeRange(input, |
| 5518 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); | 5497 scratch, |
| 5519 __ b(lt, false_label); | 5498 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, |
| 5520 __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); | 5499 LAST_NONCALLABLE_SPEC_OBJECT_TYPE, |
| 5521 __ b(gt, false_label); | 5500 false_label); |
| 5522 // Check for undetectable objects => false. | 5501 // Check for undetectable objects => false. |
| 5523 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 5502 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); |
| 5524 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 5503 __ tst(scratch, Operand(1 << Map::kIsUndetectable)); |
| 5525 final_branch_condition = eq; | 5504 final_branch_condition = eq; |
| 5526 | 5505 |
| 5527 } else { | 5506 } else { |
| 5528 __ b(false_label); | 5507 __ b(false_label); |
| 5529 } | 5508 } |
| 5530 | 5509 |
| 5531 return final_branch_condition; | 5510 return final_branch_condition; |
| 5532 } | 5511 } |
| 5533 | 5512 |
| 5534 | 5513 |
| (...skipping 240 matching lines...) | |
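In the EmitTypeofIs rewrite above, the map and bit-field loads now go through scratch rather than ip or the input register, so input stays intact across the checks; the undetectable test itself is a single-bit tst. A tiny C++ model of that bit test (not V8 code; the bit position is illustrative, not Map::kIsUndetectable's actual value):

    #include <cstdint>
    #include <iostream>

    constexpr uint8_t kIsUndetectableBit = 1 << 4;  // illustrative bit position

    // Mirrors "ldrb scratch, [map + kBitFieldOffset]; tst scratch, #(1 << bit)":
    // only the comparison result is used, nothing else is clobbered.
    bool IsUndetectable(uint8_t map_bit_field) {
      return (map_bit_field & kIsUndetectableBit) != 0;
    }

    int main() {
      std::cout << std::boolalpha << IsUndetectable(0x10) << "\n";  // true
      std::cout << std::boolalpha << IsUndetectable(0x01) << "\n";  // false
      return 0;
    }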
| 5775 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5754 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5776 __ ldr(result, FieldMemOperand(scratch, | 5755 __ ldr(result, FieldMemOperand(scratch, |
| 5777 FixedArray::kHeaderSize - kPointerSize)); | 5756 FixedArray::kHeaderSize - kPointerSize)); |
| 5778 __ bind(&done); | 5757 __ bind(&done); |
| 5779 } | 5758 } |
| 5780 | 5759 |
| 5781 | 5760 |
| 5782 #undef __ | 5761 #undef __ |
| 5783 | 5762 |
| 5784 } } // namespace v8::internal | 5763 } } // namespace v8::internal |