OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 929 matching lines...) |
940 cvtlsi2sd(dst, src); | 940 cvtlsi2sd(dst, src); |
941 } | 941 } |
942 | 942 |
943 | 943 |
944 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { | 944 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { |
945 xorps(dst, dst); | 945 xorps(dst, dst); |
946 cvtlsi2sd(dst, src); | 946 cvtlsi2sd(dst, src); |
947 } | 947 } |
948 | 948 |
949 | 949 |
950 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { | |
951 if (r.IsByte()) { | |
952 movzxbl(dst, src); | |
953 } else if (r.IsInteger32()) { | |
954 movl(dst, src); | |
955 } else { | |
956 movq(dst, src); | |
957 } | |
958 } | |
959 | |
960 | |
961 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) { | |
962 if (r.IsByte()) { | |
963 movb(dst, src); | |
964 } else if (r.IsInteger32()) { | |
965 movl(dst, src); | |
966 } else { | |
967 movq(dst, src); | |
968 } | |
969 } | |
970 | |
971 | |
972 void MacroAssembler::Set(Register dst, int64_t x) { | 950 void MacroAssembler::Set(Register dst, int64_t x) { |
973 if (x == 0) { | 951 if (x == 0) { |
974 xorl(dst, dst); | 952 xorl(dst, dst); |
975 } else if (is_uint32(x)) { | 953 } else if (is_uint32(x)) { |
976 movl(dst, Immediate(static_cast<uint32_t>(x))); | 954 movl(dst, Immediate(static_cast<uint32_t>(x))); |
977 } else if (is_int32(x)) { | 955 } else if (is_int32(x)) { |
978 movq(dst, Immediate(static_cast<int32_t>(x))); | 956 movq(dst, Immediate(static_cast<int32_t>(x))); |
979 } else { | 957 } else { |
980 movq(dst, x, RelocInfo::NONE64); | 958 movq(dst, x, RelocInfo::NONE64); |
981 } | 959 } |
(...skipping 3945 matching lines...) |
4927 j(greater, &no_memento_available); | 4905 j(greater, &no_memento_available); |
4928 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4906 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4929 Heap::kAllocationMementoMapRootIndex); | 4907 Heap::kAllocationMementoMapRootIndex); |
4930 bind(&no_memento_available); | 4908 bind(&no_memento_available); |
4931 } | 4909 } |
4932 | 4910 |
4933 | 4911 |
4934 } } // namespace v8::internal | 4912 } } // namespace v8::internal |
4935 | 4913 |
4936 #endif // V8_TARGET_ARCH_X64 | 4914 #endif // V8_TARGET_ARCH_X64 |