| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 | 
| 6 | 6 | 
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" | 
| 8 #include "src/base/division-by-constant.h" | 8 #include "src/base/division-by-constant.h" | 
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" | 
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" | 
| (...skipping 881 matching lines...) | (...skipping 881 matching lines...) |
| 892     CpuFeatureScope scope(this, AVX); | 892     CpuFeatureScope scope(this, AVX); | 
| 893     vxorpd(dst, dst, dst); | 893     vxorpd(dst, dst, dst); | 
| 894     vcvtqsi2sd(dst, dst, src); | 894     vcvtqsi2sd(dst, dst, src); | 
| 895   } else { | 895   } else { | 
| 896     xorpd(dst, dst); | 896     xorpd(dst, dst); | 
| 897     cvtqsi2sd(dst, src); | 897     cvtqsi2sd(dst, src); | 
| 898   } | 898   } | 
| 899 } | 899 } | 
| 900 | 900 | 
| 901 | 901 | 
|  | 902 void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) { | 
|  | 903   Label msb_set_src; | 
|  | 904   Label jmp_return; | 
|  | 905   testq(src, src); | 
|  | 906   j(sign, &msb_set_src, Label::kNear); | 
|  | 907   Cvtqsi2ss(dst, src); | 
|  | 908   jmp(&jmp_return, Label::kNear); | 
|  | 909   bind(&msb_set_src); | 
|  | 910   movq(tmp, src); | 
|  | 911   shrq(src, Immediate(1)); | 
|  | 912   // Recover the least significant bit to avoid rounding errors. | 
|  | 913   andq(tmp, Immediate(1)); | 
|  | 914   orq(src, tmp); | 
|  | 915   Cvtqsi2ss(dst, src); | 
|  | 916   addss(dst, dst); | 
|  | 917   bind(&jmp_return); | 
|  | 918 } | 
|  | 919 | 
|  | 920 | 
| 902 void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) { | 921 void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) { | 
| 903   Label msb_set_src; | 922   Label msb_set_src; | 
| 904   Label jmp_return; | 923   Label jmp_return; | 
| 905   testq(src, src); | 924   testq(src, src); | 
| 906   j(sign, &msb_set_src, Label::kNear); | 925   j(sign, &msb_set_src, Label::kNear); | 
| 907   Cvtqsi2sd(dst, src); | 926   Cvtqsi2sd(dst, src); | 
| 908   jmp(&jmp_return, Label::kNear); | 927   jmp(&jmp_return, Label::kNear); | 
| 909   bind(&msb_set_src); | 928   bind(&msb_set_src); | 
| 910   movq(tmp, src); | 929   movq(tmp, src); | 
| 911   shrq(src, Immediate(1)); | 930   shrq(src, Immediate(1)); | 
| 912   // Recover the least significant bit to avoid rounding errors. |  | 
| 913   andq(tmp, Immediate(1)); | 931   andq(tmp, Immediate(1)); | 
| 914   orq(src, tmp); | 932   orq(src, tmp); | 
| 915   Cvtqsi2sd(dst, src); | 933   Cvtqsi2sd(dst, src); | 
| 916   addsd(dst, dst); | 934   addsd(dst, dst); | 
| 917   bind(&jmp_return); | 935   bind(&jmp_return); | 
| 918 } | 936 } | 
| 919 | 937 | 
| 920 | 938 | 
| 921 void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) { | 939 void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) { | 
| 922   if (CpuFeatures::IsSupported(AVX)) { | 940   if (CpuFeatures::IsSupported(AVX)) { | 
| (...skipping 4572 matching lines...) | (...skipping 4572 matching lines...) |
| 5495   movl(rax, dividend); | 5513   movl(rax, dividend); | 
| 5496   shrl(rax, Immediate(31)); | 5514   shrl(rax, Immediate(31)); | 
| 5497   addl(rdx, rax); | 5515   addl(rdx, rax); | 
| 5498 } | 5516 } | 
| 5499 | 5517 | 
| 5500 | 5518 | 
| 5501 }  // namespace internal | 5519 }  // namespace internal | 
| 5502 }  // namespace v8 | 5520 }  // namespace v8 | 
| 5503 | 5521 | 
| 5504 #endif  // V8_TARGET_ARCH_X64 | 5522 #endif  // V8_TARGET_ARCH_X64 | 
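
For reference, a minimal standalone C++ sketch (not part of this CL; the function name is illustrative) of the trick the new Cvtqui2ss uses: the hardware cvtqsi2ss only converts signed 64-bit operands, so when the top bit of the unsigned source is set the value is shifted right by one with the dropped low bit OR'd back in (so round-to-nearest is not disturbed), converted, and then doubled. Cvtqui2sd follows the same pattern with cvtqsi2sd/addsd.

```cpp
// Standalone sketch, not from the patch: uint64 -> float using the same
// halving-plus-low-bit approach as Cvtqui2ss.
#include <cstdint>

float Uint64ToFloat(uint64_t value) {
  if (static_cast<int64_t>(value) >= 0) {
    // MSB clear: the value fits in a signed int64, convert directly
    // (corresponds to the Cvtqsi2ss fast path).
    return static_cast<float>(static_cast<int64_t>(value));
  }
  // MSB set: halve the value so it becomes a non-negative signed int64, but
  // OR the dropped low bit back in so the final rounding matches a direct
  // conversion (shrq + andq + orq in the assembler).
  uint64_t halved = (value >> 1) | (value & 1);
  // Convert the halved value and double the result (addss dst, dst).
  return static_cast<float>(static_cast<int64_t>(halved)) * 2.0f;
}
```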