OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1777 matching lines...)
1788 andl(scratch2, Immediate(kFlatAsciiStringMask)); | 1788 andl(scratch2, Immediate(kFlatAsciiStringMask)); |
1789 // Interleave the bits to check both scratch1 and scratch2 in one test. | 1789 // Interleave the bits to check both scratch1 and scratch2 in one test. |
1790 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 1790 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
1791 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 1791 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
1792 cmpl(scratch1, | 1792 cmpl(scratch1, |
1793 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3))); | 1793 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3))); |
1794 j(not_equal, on_fail); | 1794 j(not_equal, on_fail); |
1795 } | 1795 } |
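The bit-interleaving trick above deserves a spelled-out reading: because kFlatAsciiStringMask has no bits in common with itself shifted left by 3 (that is what the ASSERT_EQ guarantees), the lea packs the two masked instance-type bytes into disjoint bit fields of scratch1, so a single cmpl verifies that both equal kFlatAsciiStringTag. A minimal sketch of the same arithmetic in plain C++, using placeholder mask/tag values rather than V8's real instance-type constants:

    #include <cassert>
    #include <cstdint>

    const uint32_t kMaskSketch = 0x87;  // placeholder for kFlatAsciiStringMask
    const uint32_t kTagSketch  = 0x04;  // placeholder for kFlatAsciiStringTag

    bool BothFlatAscii(uint32_t type1, uint32_t type2) {
      // Mirrors the ASSERT_EQ: the shifted field cannot carry into the other.
      assert((kMaskSketch & (kMaskSketch << 3)) == 0);
      // lea(scratch1, Operand(scratch1, scratch2, times_8, 0)) computes
      // scratch1 + scratch2 * 8; with disjoint fields the sum behaves like OR.
      uint32_t combined = (type1 & kMaskSketch) + ((type2 & kMaskSketch) << 3);
      // Equal to the combined constant only when both fields equal the tag.
      return combined == kTagSketch + (kTagSketch << 3);
    }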
1796 | 1796 |
1797 | 1797 |
| 1798 void MacroAssembler::Move(Register dst, Register src) { |
| 1799 if (!dst.is(src)) { |
| 1800 movq(dst, src); |
| 1801 } |
| 1802 } |
| 1803 |
| 1804 |
| 1805 |
| 1806 |
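The new register-to-register Move overload emits a movq only when source and destination differ, so callers can unconditionally request a value in a specific register without paying for a self-move. A hypothetical usage sketch (the helper name and the choice of rax are illustrative, not part of this patch):

    void LoadResultIntoRax(MacroAssembler* masm, Register result) {
      // No instruction is emitted when result is already rax.
      masm->Move(rax, result);
    }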
1798 void MacroAssembler::Move(Register dst, Handle<Object> source) { | 1807 void MacroAssembler::Move(Register dst, Handle<Object> source) { |
1799 ASSERT(!source->IsFailure()); | 1808 ASSERT(!source->IsFailure()); |
1800 if (source->IsSmi()) { | 1809 if (source->IsSmi()) { |
1801 Move(dst, Smi::cast(*source)); | 1810 Move(dst, Smi::cast(*source)); |
1802 } else { | 1811 } else { |
1803 movq(dst, source, RelocInfo::EMBEDDED_OBJECT); | 1812 movq(dst, source, RelocInfo::EMBEDDED_OBJECT); |
1804 } | 1813 } |
1805 } | 1814 } |
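The Handle<Object> overload above separates two cases: a smi is a tagged immediate and can be loaded directly, while a heap object pointer must be emitted with RelocInfo::EMBEDDED_OBJECT so the GC can update the embedded constant if the object moves. A hypothetical call site (names and the choice of rbx are illustrative only):

    void LoadConstant(MacroAssembler* masm, Handle<Object> constant) {
      // Immediate for smis; relocated pointer for heap objects.
      masm->Move(rbx, constant);
    }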
1806 | 1815 |
1807 | 1816 |
(...skipping 1060 matching lines...)
2868 CPU::FlushICache(address_, size_); | 2877 CPU::FlushICache(address_, size_); |
2869 | 2878 |
2870 // Check that the code was patched as expected. | 2879 // Check that the code was patched as expected. |
2871 ASSERT(masm_.pc_ == address_ + size_); | 2880 ASSERT(masm_.pc_ == address_ + size_); |
2872 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2881 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2873 } | 2882 } |
2874 | 2883 |
2875 } } // namespace v8::internal | 2884 } } // namespace v8::internal |
2876 | 2885 |
2877 #endif // V8_TARGET_ARCH_X64 | 2886 #endif // V8_TARGET_ARCH_X64 |