OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 929 matching lines...) |
940 cvtlsi2sd(dst, src); | 940 cvtlsi2sd(dst, src); |
941 } | 941 } |
942 | 942 |
943 | 943 |
944 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { | 944 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { |
945 xorps(dst, dst); | 945 xorps(dst, dst); |
946 cvtlsi2sd(dst, src); | 946 cvtlsi2sd(dst, src); |
947 } | 947 } |
948 | 948 |
949 | 949 |
| 950 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { |
| 951 if (r.IsByte()) { |
| 952 movzxbl(dst, src); |
| 953 } else if (r.IsInteger32()) { |
| 954 movl(dst, src); |
| 955 } else { |
| 956 movq(dst, src); |
| 957 } |
| 958 } |
| 959 |
| 960 |
| 961 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) { |
| 962 if (r.IsByte()) { |
| 963 movb(dst, src); |
| 964 } else if (r.IsInteger32()) { |
| 965 movl(dst, src); |
| 966 } else { |
| 967 movq(dst, src); |
| 968 } |
| 969 } |
| 970 |
| 971 |
950 void MacroAssembler::Set(Register dst, int64_t x) { | 972 void MacroAssembler::Set(Register dst, int64_t x) { |
951 if (x == 0) { | 973 if (x == 0) { |
952 xorl(dst, dst); | 974 xorl(dst, dst); |
953 } else if (is_uint32(x)) { | 975 } else if (is_uint32(x)) { |
954 movl(dst, Immediate(static_cast<uint32_t>(x))); | 976 movl(dst, Immediate(static_cast<uint32_t>(x))); |
955 } else if (is_int32(x)) { | 977 } else if (is_int32(x)) { |
956 movq(dst, Immediate(static_cast<int32_t>(x))); | 978 movq(dst, Immediate(static_cast<int32_t>(x))); |
957 } else { | 979 } else { |
958 movq(dst, x, RelocInfo::NONE64); | 980 movq(dst, x, RelocInfo::NONE64); |
959 } | 981 } |
(...skipping 3941 matching lines...) |
4901 j(greater, &no_memento_available); | 4923 j(greater, &no_memento_available); |
4902 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4924 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4903 Heap::kAllocationMementoMapRootIndex); | 4925 Heap::kAllocationMementoMapRootIndex); |
4904 bind(&no_memento_available); | 4926 bind(&no_memento_available); |
4905 } | 4927 } |
4906 | 4928 |
4907 | 4929 |
4908 } } // namespace v8::internal | 4930 } } // namespace v8::internal |
4909 | 4931 |
4910 #endif // V8_TARGET_ARCH_X64 | 4932 #endif // V8_TARGET_ARCH_X64 |