OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 438 matching lines...)
449 movq(dst, Immediate(static_cast<int32_t>(x))); | 449 movq(dst, Immediate(static_cast<int32_t>(x))); |
450 } else if (is_uint32(x)) { | 450 } else if (is_uint32(x)) { |
451 movl(dst, Immediate(static_cast<uint32_t>(x))); | 451 movl(dst, Immediate(static_cast<uint32_t>(x))); |
452 } else { | 452 } else { |
453 movq(dst, x, RelocInfo::NONE); | 453 movq(dst, x, RelocInfo::NONE); |
454 } | 454 } |
455 } | 455 } |
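
Reviewer note (not part of the patch): in the Register overload above, the is_uint32() branch is safe because any 32-bit write to a general-purpose register on x86-64 zero-extends into the full 64-bit register, so movl produces the intended 64-bit value with a shorter encoding than movq with a 64-bit immediate. A minimal, GCC/Clang-only sketch of that zero-extension (hypothetical constant, not V8 code):

#include <cstdint>
#include <cstdio>

int main() {
  uint64_t out;
  // movl writes the 32-bit register name; the hardware clears the upper
  // 32 bits of the full 64-bit register, so no stale bits survive.
  asm("movl $0x80000001, %k0" : "=r"(out));
  std::printf("%#llx\n", static_cast<unsigned long long>(out));  // 0x80000001
  return 0;
}
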
456 | 456 |
457 | 457 |
458 void MacroAssembler::Set(const Operand& dst, int64_t x) { | 458 void MacroAssembler::Set(const Operand& dst, int64_t x) { |
459 if (x == 0) { | 459 if (is_int32(x)) { |
460 xor_(kScratchRegister, kScratchRegister); | |
461 movq(dst, kScratchRegister); | |
462 } else if (is_int32(x)) { | |
463 movq(dst, Immediate(static_cast<int32_t>(x))); | 460 movq(dst, Immediate(static_cast<int32_t>(x))); |
464 } else if (is_uint32(x)) { | |
465 movl(dst, Immediate(static_cast<uint32_t>(x))); | |
466 } else { | 461 } else { |
467 movq(kScratchRegister, x, RelocInfo::NONE); | 462 movq(kScratchRegister, x, RelocInfo::NONE); |
468 movq(dst, kScratchRegister); | 463 movq(dst, kScratchRegister); |
469 } | 464 } |
470 } | 465 } |
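
Reviewer note (not part of the patch): the NEW Operand overload drops both the x == 0 xor_ fast path and the is_uint32() movl path. A plausible rationale, assuming the destination is a full 64-bit memory slot: unlike a register, a 32-bit store to memory does not zero-extend; it rewrites only the low 32 bits and leaves the upper half untouched, so movl with a uint32 immediate could leave stale bits behind. Zero is already handled by the is_int32() branch. A minimal, self-contained C++ sketch of the memory pitfall (hypothetical values, little-endian assumed, not V8 code):

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  // A 64-bit slot that already holds stale data, e.g. a reused spill slot.
  uint64_t slot = 0xDEADBEEFDEADBEEFull;

  // What the removed movl path would amount to for a value that is uint32
  // but not int32: a 32-bit store that leaves the upper half untouched.
  uint32_t value = 0x80000001u;
  std::memcpy(&slot, &value, sizeof(value));  // low 4 bytes only (little-endian)

  // The slot now reads 0xDEADBEEF80000001, not 0x0000000080000001.
  assert(slot != static_cast<uint64_t>(value));
  return 0;
}
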
471 | 466 |
472 // ---------------------------------------------------------------------------- | 467 // ---------------------------------------------------------------------------- |
473 // Smi tagging, untagging and tag detection. | 468 // Smi tagging, untagging and tag detection. |
474 | 469 |
475 static int kSmiShift = kSmiTagSize + kSmiShiftSize; | 470 static int kSmiShift = kSmiTagSize + kSmiShiftSize; |
(...skipping 2285 matching lines...)
2761 CPU::FlushICache(address_, size_); | 2756 CPU::FlushICache(address_, size_); |
2762 | 2757 |
2763 // Check that the code was patched as expected. | 2758 // Check that the code was patched as expected. |
2764 ASSERT(masm_.pc_ == address_ + size_); | 2759 ASSERT(masm_.pc_ == address_ + size_); |
2765 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2760 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2766 } | 2761 } |
2767 | 2762 |
2768 } } // namespace v8::internal | 2763 } } // namespace v8::internal |
2769 | 2764 |
2770 #endif // V8_TARGET_ARCH_X64 | 2765 #endif // V8_TARGET_ARCH_X64 |