OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
(...skipping 2560 matching lines...)
2571 MoveHeapObject(kScratchRegister, source); | 2571 MoveHeapObject(kScratchRegister, source); |
2572 movp(dst, kScratchRegister); | 2572 movp(dst, kScratchRegister); |
2573 } | 2573 } |
2574 } | 2574 } |
2575 | 2575 |
2576 | 2576 |
2577 void MacroAssembler::Move(XMMRegister dst, uint32_t src) { | 2577 void MacroAssembler::Move(XMMRegister dst, uint32_t src) { |
2578 if (src == 0) { | 2578 if (src == 0) { |
2579 xorps(dst, dst); | 2579 xorps(dst, dst); |
2580 } else { | 2580 } else { |
2581 movl(kScratchRegister, Immediate(src)); | 2581 unsigned pop = base::bits::CountPopulation32(src); |
2582 movq(dst, kScratchRegister); | 2582 DCHECK_NE(0u, pop); |
| 2583 if (pop == 32) { |
| 2584 pcmpeqd(dst, dst); |
| 2585 } else { |
| 2586 movl(kScratchRegister, Immediate(src)); |
| 2587 movq(dst, kScratchRegister); |
| 2588 } |
2583 } | 2589 } |
2584 } | 2590 } |
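Note on the all-ones shortcut above: pcmpeqd with the same register as both operands compares every element with itself, so every comparison is true and the destination ends up with all bits set. A 32-bit constant whose population count is 32 can only be 0xFFFFFFFF, so it is materialized directly in the XMM register without going through kScratchRegister.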
2585 | 2591 |
2586 | 2592 |
2587 void MacroAssembler::Move(XMMRegister dst, uint64_t src) { | 2593 void MacroAssembler::Move(XMMRegister dst, uint64_t src) { |
2588 uint32_t lower = static_cast<uint32_t>(src); | 2594 if (src == 0) { |
2589 uint32_t upper = static_cast<uint32_t>(src >> 32); | 2595 xorps(dst, dst); |
2590 if (upper == 0) { | |
2591 Move(dst, lower); | |
2592 } else { | 2596 } else { |
2593 if (lower == 0) { | 2597 unsigned nlz = base::bits::CountLeadingZeros64(src); |
2594 Move(dst, upper); | 2598 unsigned ntz = base::bits::CountTrailingZeros64(src); |
2595 psllq(dst, 32); | 2599 unsigned pop = base::bits::CountPopulation64(src); |
| 2600 DCHECK_NE(0u, pop); |
| 2601 if (pop == 64) { |
| 2602 pcmpeqd(dst, dst); |
| 2603 } else if (pop + ntz == 64) { |
| 2604 pcmpeqd(dst, dst); |
| 2605 psllq(dst, ntz); |
| 2606 } else if (pop + nlz == 64) { |
| 2607 pcmpeqd(dst, dst); |
| 2608 psrlq(dst, nlz); |
2596 } else { | 2609 } else { |
2597 movq(kScratchRegister, src); | 2610 uint32_t lower = static_cast<uint32_t>(src); |
2598 movq(dst, kScratchRegister); | 2611 uint32_t upper = static_cast<uint32_t>(src >> 32); |
| 2612 if (upper == 0) { |
| 2613 Move(dst, lower); |
| 2614 } else { |
| 2615 movq(kScratchRegister, src); |
| 2616 movq(dst, kScratchRegister); |
| 2617 } |
2599 } | 2618 } |
2600 } | 2619 } |
2601 } | 2620 } |
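The 64-bit overload extends the same idea: besides all-zeros and all-ones, any constant that is a single contiguous run of ones touching either end of the word can be built from pcmpeqd followed by one logical shift of each quadword lane (psllq for a run at the top, psrlq for a run at the bottom). The sketch below is not part of the patch; it mirrors the classification the new code performs, using standard GCC/Clang builtins in place of V8's base::bits helpers.

// Illustrative sketch, not part of the patch: classifies a 64-bit constant the
// same way the new Move(XMMRegister, uint64_t) does.
#include <cstdint>
#include <cstdio>

enum class XmmMoveKind { kZero, kAllOnes, kOnesShiftedLeft, kOnesShiftedRight, kGeneric };

XmmMoveKind ClassifyXmmConstant(uint64_t src) {
  if (src == 0) return XmmMoveKind::kZero;                     // xorps dst, dst
  unsigned nlz = __builtin_clzll(src);                          // leading zero bits
  unsigned ntz = __builtin_ctzll(src);                          // trailing zero bits
  unsigned pop = __builtin_popcountll(src);                     // set bits
  if (pop == 64) return XmmMoveKind::kAllOnes;                  // pcmpeqd dst, dst
  if (pop + ntz == 64) return XmmMoveKind::kOnesShiftedLeft;    // pcmpeqd; psllq ntz
  if (pop + nlz == 64) return XmmMoveKind::kOnesShiftedRight;   // pcmpeqd; psrlq nlz
  return XmmMoveKind::kGeneric;                                 // movq via kScratchRegister
}

int main() {
  // 0xFFFF000000000000 has 16 set bits and 48 trailing zeros (16 + 48 == 64),
  // so it is a contiguous run of ones at the top and takes the psllq path.
  printf("%d\n", static_cast<int>(ClassifyXmmConstant(0xFFFF000000000000ull)));
  return 0;
}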
2602 | 2621 |
2603 | 2622 |
2604 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { | 2623 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { |
2605 AllowDeferredHandleDereference smi_check; | 2624 AllowDeferredHandleDereference smi_check; |
2606 if (source->IsSmi()) { | 2625 if (source->IsSmi()) { |
2607 Cmp(dst, Smi::cast(*source)); | 2626 Cmp(dst, Smi::cast(*source)); |
2608 } else { | 2627 } else { |
(...skipping 2601 matching lines...)
5210 if (mag.shift > 0) sarl(rdx, Immediate(mag.shift)); | 5229 if (mag.shift > 0) sarl(rdx, Immediate(mag.shift)); |
5211 movl(rax, dividend); | 5230 movl(rax, dividend); |
5212 shrl(rax, Immediate(31)); | 5231 shrl(rax, Immediate(31)); |
5213 addl(rdx, rax); | 5232 addl(rdx, rax); |
5214 } | 5233 } |
5215 | 5234 |
5216 | 5235 |
5217 } } // namespace v8::internal | 5236 } } // namespace v8::internal |
5218 | 5237 |
5219 #endif // V8_TARGET_ARCH_X64 | 5238 #endif // V8_TARGET_ARCH_X64 |