| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 328 matching lines...) |
| 339 const Register& rn, | 339 const Register& rn, |
| 340 const Register& rm) { | 340 const Register& rm) { |
| 341 ASSERT(allow_macro_instructions_); | 341 ASSERT(allow_macro_instructions_); |
| 342 ASSERT(!rd.IsZero()); | 342 ASSERT(!rd.IsZero()); |
| 343 asrv(rd, rn, rm); | 343 asrv(rd, rn, rm); |
| 344 } | 344 } |
| 345 | 345 |
| 346 | 346 |
| 347 void MacroAssembler::B(Label* label) { | 347 void MacroAssembler::B(Label* label) { |
| 348 b(label); | 348 b(label); |
| 349 CheckVeneers(false); | 349 CheckVeneerPool(false); |
| 350 } | 350 } |
| 351 | 351 |
| 352 | 352 |
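Note: the rename from CheckVeneers() to CheckVeneerPool() lands at the spots right after unconditional control transfers (B above, Ret later in this file). Short-range branches on A64 can only reach a limited displacement, so after an unconditional jump the assembler checks whether it should emit veneers (out-of-line trampolines) for labels drifting out of range; a point where execution cannot fall through is a safe place to drop them. A rough sketch of the kind of range test involved, with hypothetical names and limits:

    #include <cstdlib>

    // Hypothetical: can a pending branch to label_offset still reach from
    // pc_offset, given the instruction's maximum displacement?
    static bool NeedsVeneer(int label_offset, int pc_offset, int max_range) {
      return std::abs(label_offset - pc_offset) > max_range;
    }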
| 353 void MacroAssembler::B(Condition cond, Label* label) { | 353 void MacroAssembler::B(Condition cond, Label* label) { |
| 354 ASSERT(allow_macro_instructions_); | 354 ASSERT(allow_macro_instructions_); |
| 355 B(label, cond); | 355 B(label, cond); |
| 356 } | 356 } |
| 357 | 357 |
| 358 | 358 |
| 359 void MacroAssembler::Bfi(const Register& rd, | 359 void MacroAssembler::Bfi(const Register& rd, |
| (...skipping 221 matching lines...) |
| 581 | 581 |
| 582 void MacroAssembler::Fcmp(const FPRegister& fn, const FPRegister& fm) { | 582 void MacroAssembler::Fcmp(const FPRegister& fn, const FPRegister& fm) { |
| 583 ASSERT(allow_macro_instructions_); | 583 ASSERT(allow_macro_instructions_); |
| 584 fcmp(fn, fm); | 584 fcmp(fn, fm); |
| 585 } | 585 } |
| 586 | 586 |
| 587 | 587 |
| 588 void MacroAssembler::Fcmp(const FPRegister& fn, double value) { | 588 void MacroAssembler::Fcmp(const FPRegister& fn, double value) { |
| 589 ASSERT(allow_macro_instructions_); | 589 ASSERT(allow_macro_instructions_); |
| 590 if (value != 0.0) { | 590 if (value != 0.0) { |
| 591 FPRegister tmp = AppropriateTempFor(fn); | 591 UseScratchRegisterScope temps(this); |
| 592 FPRegister tmp = temps.AcquireSameSizeAs(fn); |
| 592 Fmov(tmp, value); | 593 Fmov(tmp, value); |
| 593 fcmp(fn, tmp); | 594 fcmp(fn, tmp); |
| 594 } else { | 595 } else { |
| 595 fcmp(fn, value); | 596 fcmp(fn, value); |
| 596 } | 597 } |
| 597 } | 598 } |
| 598 | 599 |
| 599 | 600 |
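Note: this hunk swaps the old fixed temporaries (Tmp0(), AppropriateTempFor()) for UseScratchRegisterScope, which checks scratch registers out of the assembler's pool for the duration of a C++ scope and hands them back on destruction, so nested or overlapping uses get caught instead of silently clobbering each other. A minimal, self-contained sketch of the RAII pattern (ScratchScope and the integer ids are stand-ins, not the real V8 class):

    #include <cassert>
    #include <vector>

    class ScratchScope {
     public:
      explicit ScratchScope(std::vector<int>* free_list) : free_(free_list) {}
      ~ScratchScope() {                      // hand everything back on exit
        for (int id : taken_) free_->push_back(id);
      }
      int Acquire() {                        // check one scratch register out
        assert(!free_->empty() && "scratch pool exhausted");
        int id = free_->back();
        free_->pop_back();
        taken_.push_back(id);
        return id;
      }
     private:
      std::vector<int>* free_;   // shared pool, owned by the assembler
      std::vector<int> taken_;   // what this scope must return
    };

The same pattern recurs throughout the rest of the diff (Fmov, JumpIfBothSmi, Push).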
| 600 void MacroAssembler::Fcsel(const FPRegister& fd, | 601 void MacroAssembler::Fcsel(const FPRegister& fd, |
| 601 const FPRegister& fn, | 602 const FPRegister& fn, |
| (...skipping 127 matching lines...) |
| 729 | 730 |
| 730 | 731 |
| 731 void MacroAssembler::Fmov(FPRegister fd, Register rn) { | 732 void MacroAssembler::Fmov(FPRegister fd, Register rn) { |
| 732 ASSERT(allow_macro_instructions_); | 733 ASSERT(allow_macro_instructions_); |
| 733 fmov(fd, rn); | 734 fmov(fd, rn); |
| 734 } | 735 } |
| 735 | 736 |
| 736 | 737 |
| 737 void MacroAssembler::Fmov(FPRegister fd, double imm) { | 738 void MacroAssembler::Fmov(FPRegister fd, double imm) { |
| 738 ASSERT(allow_macro_instructions_); | 739 ASSERT(allow_macro_instructions_); |
| 739 if ((fd.Is64Bits() && IsImmFP64(imm)) || | 740 if (fd.Is32Bits()) { |
| 740 (fd.Is32Bits() && IsImmFP32(imm)) || | 741 Fmov(fd, static_cast<float>(imm)); |
| 741 ((imm == 0.0) && (copysign(1.0, imm) == 1.0))) { | 742 return; |
| 742 // These cases can be handled by the Assembler. | 743 } |
| 744 |
| 745 ASSERT(fd.Is64Bits()); |
| 746 if (IsImmFP64(imm)) { |
| 743 fmov(fd, imm); | 747 fmov(fd, imm); |
| 748 } else if ((imm == 0.0) && (copysign(1.0, imm) == 1.0)) { |
| 749 fmov(fd, xzr); |
| 744 } else { | 750 } else { |
| 745 // TODO(all): The Assembler would try to relocate the immediate with | 751 UseScratchRegisterScope temps(this); |
| 746 // Assembler::ldr(const FPRegister& ft, double imm) but it is not | 752 Register tmp = temps.AcquireX(); |
| 747 // implemented yet. | 753 // TODO(all): Use Assembler::ldr(const FPRegister& ft, double imm). |
| 748 if (fd.SizeInBits() == kDRegSize) { | 754 Mov(tmp, double_to_rawbits(imm)); |
| 749 Mov(Tmp0(), double_to_rawbits(imm)); | 755 Fmov(fd, tmp); |
| 750 Fmov(fd, Tmp0()); | |
| 751 } else { | |
| 752 ASSERT(fd.SizeInBits() == kSRegSize); | |
| 753 Mov(WTmp0(), float_to_rawbits(static_cast<float>(imm))); | |
| 754 Fmov(fd, WTmp0()); | |
| 755 } | |
| 756 } | 756 } |
| 757 } | 757 } |
| 758 | 758 |
| 759 |
| 760 void MacroAssembler::Fmov(FPRegister fd, float imm) { |
| 761 ASSERT(allow_macro_instructions_); |
| 762 if (fd.Is64Bits()) { |
| 763 Fmov(fd, static_cast<double>(imm)); |
| 764 return; |
| 765 } |
| 766 |
| 767 ASSERT(fd.Is32Bits()); |
| 768 if (IsImmFP32(imm)) { |
| 769 fmov(fd, imm); |
| 770 } else if ((imm == 0.0) && (copysign(1.0, imm) == 1.0)) { |
| 771 fmov(fd, wzr); |
| 772 } else { |
| 773 UseScratchRegisterScope temps(this); |
| 774 Register tmp = temps.AcquireW(); |
| 775 // TODO(all): Use Assembler::ldr(const FPRegister& ft, float imm). |
| 776 Mov(tmp, float_to_rawbits(imm)); |
| 777 Fmov(fd, tmp); |
| 778 } |
| 779 } |
| 780 |
| 759 | 781 |
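Note: the rewritten Fmov immediates follow a three-way split, after first delegating across widths (a 32-bit destination with a double immediate narrows to float, and vice versa). If the value fits A64's 8-bit floating-point immediate encoding (IsImmFP64/IsImmFP32), a plain fmov does the job. +0.0 is special-cased through the zero register; the copysign(1.0, imm) == 1.0 test is what separates +0.0 from -0.0, since -0.0 == 0.0 compares true. Everything else is materialized by moving the raw IEEE-754 bits through an integer scratch register. A rough equivalent of what double_to_rawbits()/float_to_rawbits() compute (memcpy is the well-defined way to do the type pun):

    #include <cstdint>
    #include <cstring>

    static uint64_t DoubleToRawbits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));  // reinterpret, don't convert
      return bits;
    }

    static uint32_t FloatToRawbits(float value) {
      uint32_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return bits;
    }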
| 760 void MacroAssembler::Fmov(Register rd, FPRegister fn) { | 782 void MacroAssembler::Fmov(Register rd, FPRegister fn) { |
| 761 ASSERT(allow_macro_instructions_); | 783 ASSERT(allow_macro_instructions_); |
| 762 ASSERT(!rd.IsZero()); | 784 ASSERT(!rd.IsZero()); |
| 763 fmov(rd, fn); | 785 fmov(rd, fn); |
| 764 } | 786 } |
| 765 | 787 |
| 766 | 788 |
| 767 void MacroAssembler::Fmsub(const FPRegister& fd, | 789 void MacroAssembler::Fmsub(const FPRegister& fd, |
| 768 const FPRegister& fn, | 790 const FPRegister& fn, |
| (...skipping 238 matching lines...) |
| 1007 ASSERT(allow_macro_instructions_); | 1029 ASSERT(allow_macro_instructions_); |
| 1008 ASSERT(!rd.IsZero()); | 1030 ASSERT(!rd.IsZero()); |
| 1009 rbit(rd, rn); | 1031 rbit(rd, rn); |
| 1010 } | 1032 } |
| 1011 | 1033 |
| 1012 | 1034 |
| 1013 void MacroAssembler::Ret(const Register& xn) { | 1035 void MacroAssembler::Ret(const Register& xn) { |
| 1014 ASSERT(allow_macro_instructions_); | 1036 ASSERT(allow_macro_instructions_); |
| 1015 ASSERT(!xn.IsZero()); | 1037 ASSERT(!xn.IsZero()); |
| 1016 ret(xn); | 1038 ret(xn); |
| 1017 CheckVeneers(false); | 1039 CheckVeneerPool(false); |
| 1018 } | 1040 } |
| 1019 | 1041 |
| 1020 | 1042 |
| 1021 void MacroAssembler::Rev(const Register& rd, const Register& rn) { | 1043 void MacroAssembler::Rev(const Register& rd, const Register& rn) { |
| 1022 ASSERT(allow_macro_instructions_); | 1044 ASSERT(allow_macro_instructions_); |
| 1023 ASSERT(!rd.IsZero()); | 1045 ASSERT(!rd.IsZero()); |
| 1024 rev(rd, rn); | 1046 rev(rd, rn); |
| 1025 } | 1047 } |
| 1026 | 1048 |
| 1027 | 1049 |
| (...skipping 316 matching lines...) |
| 1344 void MacroAssembler::JumpIfNotSmi(Register value, Label* not_smi_label) { | 1366 void MacroAssembler::JumpIfNotSmi(Register value, Label* not_smi_label) { |
| 1345 JumpIfSmi(value, NULL, not_smi_label); | 1367 JumpIfSmi(value, NULL, not_smi_label); |
| 1346 } | 1368 } |
| 1347 | 1369 |
| 1348 | 1370 |
| 1349 void MacroAssembler::JumpIfBothSmi(Register value1, | 1371 void MacroAssembler::JumpIfBothSmi(Register value1, |
| 1350 Register value2, | 1372 Register value2, |
| 1351 Label* both_smi_label, | 1373 Label* both_smi_label, |
| 1352 Label* not_smi_label) { | 1374 Label* not_smi_label) { |
| 1353 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); | 1375 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); |
| 1376 UseScratchRegisterScope temps(this); |
| 1377 Register tmp = temps.AcquireX(); |
| 1354 // Check if both tag bits are clear. | 1378 // Check if both tag bits are clear. |
| 1355 Orr(Tmp0(), value1, value2); | 1379 Orr(tmp, value1, value2); |
| 1356 JumpIfSmi(Tmp0(), both_smi_label, not_smi_label); | 1380 JumpIfSmi(tmp, both_smi_label, not_smi_label); |
| 1357 } | 1381 } |
| 1358 | 1382 |
| 1359 | 1383 |
| 1360 void MacroAssembler::JumpIfEitherSmi(Register value1, | 1384 void MacroAssembler::JumpIfEitherSmi(Register value1, |
| 1361 Register value2, | 1385 Register value2, |
| 1362 Label* either_smi_label, | 1386 Label* either_smi_label, |
| 1363 Label* not_smi_label) { | 1387 Label* not_smi_label) { |
| 1364 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); | 1388 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); |
| 1389 UseScratchRegisterScope temps(this); |
| 1390 Register tmp = temps.AcquireX(); |
| 1365 // Check if either tag bit is clear. | 1391 // Check if either tag bit is clear. |
| 1366 And(Tmp0(), value1, value2); | 1392 And(tmp, value1, value2); |
| 1367 JumpIfSmi(Tmp0(), either_smi_label, not_smi_label); | 1393 JumpIfSmi(tmp, either_smi_label, not_smi_label); |
| 1368 } | 1394 } |
| 1369 | 1395 |
| 1370 | 1396 |
| 1371 void MacroAssembler::JumpIfEitherNotSmi(Register value1, | 1397 void MacroAssembler::JumpIfEitherNotSmi(Register value1, |
| 1372 Register value2, | 1398 Register value2, |
| 1373 Label* not_smi_label) { | 1399 Label* not_smi_label) { |
| 1374 JumpIfBothSmi(value1, value2, NULL, not_smi_label); | 1400 JumpIfBothSmi(value1, value2, NULL, not_smi_label); |
| 1375 } | 1401 } |
| 1376 | 1402 |
| 1377 | 1403 |
| (...skipping 52 matching lines...) |
| 1430 } else if (not_string == NULL) { | 1456 } else if (not_string == NULL) { |
| 1431 TestAndBranchIfAllClear(type.W(), kIsNotStringMask, string); | 1457 TestAndBranchIfAllClear(type.W(), kIsNotStringMask, string); |
| 1432 } else { | 1458 } else { |
| 1433 TestAndBranchIfAnySet(type.W(), kIsNotStringMask, not_string); | 1459 TestAndBranchIfAnySet(type.W(), kIsNotStringMask, not_string); |
| 1434 B(string); | 1460 B(string); |
| 1435 } | 1461 } |
| 1436 } | 1462 } |
| 1437 | 1463 |
| 1438 | 1464 |
| 1439 void MacroAssembler::Push(Handle<Object> handle) { | 1465 void MacroAssembler::Push(Handle<Object> handle) { |
| 1440 Mov(Tmp0(), Operand(handle)); | 1466 UseScratchRegisterScope temps(this); |
| 1441 Push(Tmp0()); | 1467 Register tmp = temps.AcquireX(); |
| 1468 Mov(tmp, Operand(handle)); |
| 1469 Push(tmp); |
| 1442 } | 1470 } |
| 1443 | 1471 |
| 1444 | 1472 |
| 1445 void MacroAssembler::Claim(uint64_t count, uint64_t unit_size) { | 1473 void MacroAssembler::Claim(uint64_t count, uint64_t unit_size) { |
| 1446 uint64_t size = count * unit_size; | 1474 uint64_t size = count * unit_size; |
| 1447 | 1475 |
| 1448 if (size == 0) { | 1476 if (size == 0) { |
| 1449 return; | 1477 return; |
| 1450 } | 1478 } |
| 1451 | 1479 |
| 1452 if (csp.Is(StackPointer())) { | 1480 if (csp.Is(StackPointer())) { |
| 1453 ASSERT(size % 16 == 0); | 1481 ASSERT(size % 16 == 0); |
| 1454 } else { | 1482 } else { |
| 1455 BumpSystemStackPointer(size); | 1483 BumpSystemStackPointer(size); |
| 1456 } | 1484 } |
| 1457 | 1485 |
| 1458 Sub(StackPointer(), StackPointer(), size); | 1486 Sub(StackPointer(), StackPointer(), size); |
| 1459 } | 1487 } |
| 1460 | 1488 |
| 1461 | 1489 |
| 1462 void MacroAssembler::Claim(const Register& count, uint64_t unit_size) { | 1490 void MacroAssembler::Claim(const Register& count, uint64_t unit_size) { |
| 1463 ASSERT(IsPowerOf2(unit_size)); | 1491 ASSERT(IsPowerOf2(unit_size)); |
| 1464 | 1492 |
| 1465 if (unit_size == 0) { | 1493 if (unit_size == 0) { |
| 1466 return; | 1494 return; |
| 1467 } | 1495 } |
| 1468 | 1496 |
| 1469 const int shift = CountTrailingZeros(unit_size, kXRegSize); | 1497 const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits); |
| 1470 const Operand size(count, LSL, shift); | 1498 const Operand size(count, LSL, shift); |
| 1471 | 1499 |
| 1472 if (size.IsZero()) { | 1500 if (size.IsZero()) { |
| 1473 return; | 1501 return; |
| 1474 } | 1502 } |
| 1475 | 1503 |
| 1476 if (!csp.Is(StackPointer())) { | 1504 if (!csp.Is(StackPointer())) { |
| 1477 BumpSystemStackPointer(size); | 1505 BumpSystemStackPointer(size); |
| 1478 } | 1506 } |
| 1479 | 1507 |
| 1480 Sub(StackPointer(), StackPointer(), size); | 1508 Sub(StackPointer(), StackPointer(), size); |
| 1481 } | 1509 } |
| 1482 | 1510 |
| 1483 | 1511 |
| 1484 void MacroAssembler::ClaimBySMI(const Register& count_smi, uint64_t unit_size) { | 1512 void MacroAssembler::ClaimBySMI(const Register& count_smi, uint64_t unit_size) { |
| 1485 ASSERT(IsPowerOf2(unit_size)); | 1513 ASSERT(IsPowerOf2(unit_size)); |
| 1486 const int shift = CountTrailingZeros(unit_size, kXRegSize) - kSmiShift; | 1514 const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift; |
| 1487 const Operand size(count_smi, | 1515 const Operand size(count_smi, |
| 1488 (shift >= 0) ? (LSL) : (LSR), | 1516 (shift >= 0) ? (LSL) : (LSR), |
| 1489 (shift >= 0) ? (shift) : (-shift)); | 1517 (shift >= 0) ? (shift) : (-shift)); |
| 1490 | 1518 |
| 1491 if (size.IsZero()) { | 1519 if (size.IsZero()) { |
| 1492 return; | 1520 return; |
| 1493 } | 1521 } |
| 1494 | 1522 |
| 1495 if (!csp.Is(StackPointer())) { | 1523 if (!csp.Is(StackPointer())) { |
| 1496 BumpSystemStackPointer(size); | 1524 BumpSystemStackPointer(size); |
| (...skipping 23 matching lines...) |
| 1520 } | 1548 } |
| 1521 | 1549 |
| 1522 | 1550 |
| 1523 void MacroAssembler::Drop(const Register& count, uint64_t unit_size) { | 1551 void MacroAssembler::Drop(const Register& count, uint64_t unit_size) { |
| 1524 ASSERT(IsPowerOf2(unit_size)); | 1552 ASSERT(IsPowerOf2(unit_size)); |
| 1525 | 1553 |
| 1526 if (unit_size == 0) { | 1554 if (unit_size == 0) { |
| 1527 return; | 1555 return; |
| 1528 } | 1556 } |
| 1529 | 1557 |
| 1530 const int shift = CountTrailingZeros(unit_size, kXRegSize); | 1558 const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits); |
| 1531 const Operand size(count, LSL, shift); | 1559 const Operand size(count, LSL, shift); |
| 1532 | 1560 |
| 1533 if (size.IsZero()) { | 1561 if (size.IsZero()) { |
| 1534 return; | 1562 return; |
| 1535 } | 1563 } |
| 1536 | 1564 |
| 1537 Add(StackPointer(), StackPointer(), size); | 1565 Add(StackPointer(), StackPointer(), size); |
| 1538 | 1566 |
| 1539 if (!csp.Is(StackPointer()) && emit_debug_code()) { | 1567 if (!csp.Is(StackPointer()) && emit_debug_code()) { |
| 1540 // It is safe to leave csp where it is when unwinding the JavaScript stack, | 1568 // It is safe to leave csp where it is when unwinding the JavaScript stack, |
| 1541 // but if we keep it matching StackPointer, the simulator can detect memory | 1569 // but if we keep it matching StackPointer, the simulator can detect memory |
| 1542 // accesses in the now-free part of the stack. | 1570 // accesses in the now-free part of the stack. |
| 1543 Mov(csp, StackPointer()); | 1571 Mov(csp, StackPointer()); |
| 1544 } | 1572 } |
| 1545 } | 1573 } |
| 1546 | 1574 |
| 1547 | 1575 |
| 1548 void MacroAssembler::DropBySMI(const Register& count_smi, uint64_t unit_size) { | 1576 void MacroAssembler::DropBySMI(const Register& count_smi, uint64_t unit_size) { |
| 1549 ASSERT(IsPowerOf2(unit_size)); | 1577 ASSERT(IsPowerOf2(unit_size)); |
| 1550 const int shift = CountTrailingZeros(unit_size, kXRegSize) - kSmiShift; | 1578 const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift; |
| 1551 const Operand size(count_smi, | 1579 const Operand size(count_smi, |
| 1552 (shift >= 0) ? (LSL) : (LSR), | 1580 (shift >= 0) ? (LSL) : (LSR), |
| 1553 (shift >= 0) ? (shift) : (-shift)); | 1581 (shift >= 0) ? (shift) : (-shift)); |
| 1554 | 1582 |
| 1555 if (size.IsZero()) { | 1583 if (size.IsZero()) { |
| 1556 return; | 1584 return; |
| 1557 } | 1585 } |
| 1558 | 1586 |
| 1559 Add(StackPointer(), StackPointer(), size); | 1587 Add(StackPointer(), StackPointer(), size); |
| 1560 | 1588 |
| (...skipping 78 matching lines...) |
| 1639 // characters are reserved for controlling features of the instrumentation. | 1667 // characters are reserved for controlling features of the instrumentation. |
| 1640 ASSERT(isprint(marker_name[0]) && isprint(marker_name[1])); | 1668 ASSERT(isprint(marker_name[0]) && isprint(marker_name[1])); |
| 1641 | 1669 |
| 1642 InstructionAccurateScope scope(this, 1); | 1670 InstructionAccurateScope scope(this, 1); |
| 1643 movn(xzr, (marker_name[1] << 8) | marker_name[0]); | 1671 movn(xzr, (marker_name[1] << 8) | marker_name[0]); |
| 1644 } | 1672 } |
| 1645 | 1673 |
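Note: the marker works because movn with xzr as destination is architecturally a no-op (writes to the zero register are discarded), so the 16-bit immediate rides along purely as a tag the instrumentation tooling can spot in the instruction stream. How the two printable characters pack into that immediate, mirroring the expression above:

    #include <cassert>
    #include <cctype>
    #include <cstdint>

    // "AB" -> 0x4241: second character in the high byte, first in the low.
    static uint16_t MarkerPayload(const char name[2]) {
      assert(std::isprint(name[0]) && std::isprint(name[1]));
      return static_cast<uint16_t>((name[1] << 8) | name[0]);
    }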
| 1646 } } // namespace v8::internal | 1674 } } // namespace v8::internal |
| 1647 | 1675 |
| 1648 #endif // V8_A64_MACRO_ASSEMBLER_A64_INL_H_ | 1676 #endif // V8_A64_MACRO_ASSEMBLER_A64_INL_H_ |