| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1535 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1546 if (push_pop) { | 1546 if (push_pop) { |
| 1547 add(Operand(esp), Immediate(kPointerSize)); // Pop. | 1547 add(Operand(esp), Immediate(kPointerSize)); // Pop. |
| 1548 } | 1548 } |
| 1549 if (!scratch.is(dst)) { | 1549 if (!scratch.is(dst)) { |
| 1550 mov(dst, scratch); | 1550 mov(dst, scratch); |
| 1551 } | 1551 } |
| 1552 } | 1552 } |
| 1553 } | 1553 } |
| 1554 | 1554 |
| 1555 | 1555 |
| 1556 void MacroAssembler::LoadPowerOf2(XMMRegister dst,  // Materializes 2^power as an IEEE-754 double in dst. |
| 1557 Register scratch,  // Clobbered: used to stage the biased exponent. |
| 1558 int power) { |
| 1559 ASSERT(is_uintn(power + HeapNumber::kExponentBias,  // Biased exponent must fit in the exponent field. |
| 1560 HeapNumber::kExponentBits)); |
| 1561 mov(scratch, Immediate(power + HeapNumber::kExponentBias)); |
| 1562 movd(dst, Operand(scratch));  // Biased exponent into low 32 bits of the XMM register. |
| 1563 psllq(dst, HeapNumber::kMantissaBits);  // Shift into exponent position; mantissa stays 0, giving exactly 2^power. |
| 1564 } |
| 1565 |
| 1566 |
| 1556 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( | 1567 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( |
| 1557 Register instance_type, | 1568 Register instance_type, |
| 1558 Register scratch, | 1569 Register scratch, |
| 1559 Label* failure) { | 1570 Label* failure) { |
| 1560 if (!scratch.is(instance_type)) { | 1571 if (!scratch.is(instance_type)) { |
| 1561 mov(scratch, instance_type); | 1572 mov(scratch, instance_type); |
| 1562 } | 1573 } |
| 1563 and_(scratch, | 1574 and_(scratch, |
| 1564 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); | 1575 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); |
| 1565 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag); | 1576 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag); |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1654 | 1665 |
| 1655 // Check that the code was patched as expected. | 1666 // Check that the code was patched as expected. |
| 1656 ASSERT(masm_.pc_ == address_ + size_); | 1667 ASSERT(masm_.pc_ == address_ + size_); |
| 1657 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1668 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 1658 } | 1669 } |
| 1659 | 1670 |
| 1660 | 1671 |
| 1661 } } // namespace v8::internal | 1672 } } // namespace v8::internal |
| 1662 | 1673 |
| 1663 #endif // V8_TARGET_ARCH_IA32 | 1674 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |