| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 932 matching lines...) |
| 943 | 943 |
| 944 | 944 |
| 945 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { | 945 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) { |
| 946 xorps(dst, dst); | 946 xorps(dst, dst); |
| 947 cvtlsi2sd(dst, src); | 947 cvtlsi2sd(dst, src); |
| 948 } | 948 } |
| 949 | 949 |
| 950 | 950 |
| 951 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { | 951 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) { |
| 952 ASSERT(!r.IsDouble()); | 952 ASSERT(!r.IsDouble()); |
| 953 if (r.IsByte()) { | 953 if (r.IsInteger8()) { |
| 954 movsxbq(dst, src); |
| 955 } else if (r.IsUInteger8()) { |
| 954 movzxbl(dst, src); | 956 movzxbl(dst, src); |
| 957 } else if (r.IsInteger16()) { |
| 958 movsxwq(dst, src); |
| 959 } else if (r.IsUInteger16()) { |
| 960 movzxwl(dst, src); |
| 955 } else if (r.IsInteger32()) { | 961 } else if (r.IsInteger32()) { |
| 956 movl(dst, src); | 962 movl(dst, src); |
| 957 } else { | 963 } else { |
| 958 movq(dst, src); | 964 movq(dst, src); |
| 959 } | 965 } |
| 960 } | 966 } |
| 961 | 967 |
| 962 | 968 |
| 963 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) { | 969 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) { |
| 964 ASSERT(!r.IsDouble()); | 970 ASSERT(!r.IsDouble()); |
| 965 if (r.IsByte()) { | 971 if (r.IsInteger8() || r.IsUInteger8()) { |
| 966 movb(dst, src); | 972 movb(dst, src); |
| 973 } else if (r.IsInteger16() || r.IsUInteger16()) { |
| 974 movw(dst, src); |
| 967 } else if (r.IsInteger32()) { | 975 } else if (r.IsInteger32()) { |
| 968 movl(dst, src); | 976 movl(dst, src); |
| 969 } else { | 977 } else { |
| 970 movq(dst, src); | 978 movq(dst, src); |
| 971 } | 979 } |
| 972 } | 980 } |
| 973 | 981 |
| 974 | 982 |
| 975 void MacroAssembler::Set(Register dst, int64_t x) { | 983 void MacroAssembler::Set(Register dst, int64_t x) { |
| 976 if (x == 0) { | 984 if (x == 0) { |
| (...skipping 3988 matching lines...) |
| 4965 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | 4973 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
| 4966 CallCFunction( | 4974 CallCFunction( |
| 4967 ExternalReference::record_object_allocation_function(isolate), 3); | 4975 ExternalReference::record_object_allocation_function(isolate), 3); |
| 4968 PopSafepointRegisters(); | 4976 PopSafepointRegisters(); |
| 4969 } | 4977 } |
| 4970 | 4978 |
| 4971 | 4979 |
| 4972 } } // namespace v8::internal | 4980 } } // namespace v8::internal |
| 4973 | 4981 |
| 4974 #endif // V8_TARGET_ARCH_X64 | 4982 #endif // V8_TARGET_ARCH_X64 |
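
The substance of this patch is the split of the old catch-all `r.IsByte()` case in `MacroAssembler::Load`/`Store` into four representation cases. On the load side the distinction matters because a signed byte or word must be sign-extended into the destination register (`movsxbq`/`movsxwq`) while an unsigned one must be zero-extended (`movzxbl`/`movzxwl`); on the store side `movb`/`movw` only write the low 8 or 16 bits, so signed and unsigned can share a case. A minimal sketch in plain C++ (illustration only, not V8 code) of the load-side difference:

```cpp
// Why Load() needs separate Integer8 and UInteger8 paths: the byte 0xFF
// must read back as -1 when the slot is signed but 255 when it is
// unsigned. The casts below mirror the movsx/movzx distinction.
#include <cstdint>
#include <cstdio>

int main() {
  uint8_t raw = 0xFF;  // the byte as it sits in memory

  // movsxbq-style load: sign-extend the byte to full register width.
  int64_t as_int8 = static_cast<int8_t>(raw);

  // movzxbl-style load: zero-extend. (On x64, writing the low 32 bits
  // of a register already clears the upper 32, so a 32-bit movzx is
  // enough; that is why the patch uses movzxbl/movzxwl, not 64-bit forms.)
  int64_t as_uint8 = static_cast<uint8_t>(raw);

  std::printf("Integer8:  %lld\n", static_cast<long long>(as_int8));   // -1
  std::printf("UInteger8: %lld\n", static_cast<long long>(as_uint8));  // 255
  return 0;
}
```

By the same token, `Store()` merges `IsInteger8() || IsUInteger8()` into one `movb` case (and likewise for 16 bits with `movw`): a narrowing store discards the upper bits either way, so extension semantics only ever matter on the load.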