OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/crankshaft/x87/lithium-codegen-x87.h" | 7 #include "src/crankshaft/x87/lithium-codegen-x87.h" |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 2062 matching lines...)
2073 if (operation == HMathMinMax::kMathMin) { | 2073 if (operation == HMathMinMax::kMathMin) { |
2074 // Push st0 and st1 to stack, then pop them to temp registers and OR them, | 2074 // Push st0 and st1 to stack, then pop them to temp registers and OR them, |
2075 // load it to left. | 2075 // load it to left. |
2076 Register scratch_reg = ToRegister(instr->temp()); | 2076 Register scratch_reg = ToRegister(instr->temp()); |
2077 __ fld(1); | 2077 __ fld(1); |
2078 __ fld(1); | 2078 __ fld(1); |
2079 __ sub(esp, Immediate(2 * kPointerSize)); | 2079 __ sub(esp, Immediate(2 * kPointerSize)); |
2080 __ fstp_s(MemOperand(esp, 0)); | 2080 __ fstp_s(MemOperand(esp, 0)); |
2081 __ fstp_s(MemOperand(esp, kPointerSize)); | 2081 __ fstp_s(MemOperand(esp, kPointerSize)); |
2082 __ pop(scratch_reg); | 2082 __ pop(scratch_reg); |
2083 __ xor_(MemOperand(esp, 0), scratch_reg); | 2083 __ or_(MemOperand(esp, 0), scratch_reg); |
2084 X87Mov(left_reg, MemOperand(esp, 0), kX87FloatOperand); | 2084 X87Mov(left_reg, MemOperand(esp, 0), kX87FloatOperand); |
2085 __ pop(scratch_reg); // restore esp | 2085 __ pop(scratch_reg); // restore esp |
2086 } else { | 2086 } else { |
2087 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. | 2087 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. |
2088 X87Fxch(left_reg); | 2088 X87Fxch(left_reg); |
2089 __ fadd(1); | 2089 __ fadd(1); |
2090 } | 2090 } |
2091 __ jmp(&return_left, Label::kNear); | 2091 __ jmp(&return_left, Label::kNear); |
2092 | 2092 |
2093 __ bind(&check_nan_left); | 2093 __ bind(&check_nan_left); |
(...skipping 3858 matching lines...)
5952 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context); | 5952 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context); |
5953 } | 5953 } |
5954 | 5954 |
5955 | 5955 |
5956 #undef __ | 5956 #undef __ |
5957 | 5957 |
5958 } // namespace internal | 5958 } // namespace internal |
5959 } // namespace v8 | 5959 } // namespace v8 |
5960 | 5960 |
5961 #endif // V8_TARGET_ARCH_X87 | 5961 #endif // V8_TARGET_ARCH_X87 |
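
The only functional change in this diff is at line 2083, where xor_ becomes or_ in the kMathMin path. That path is only reached once both operands have compared equal, so the two single-precision values on the FPU stack can differ only in the sign of zero. OR-ing their bit patterns keeps the sign bit set whenever either operand is -0, which is what IEEE-754 min requires; XOR clears the sign bit when both operands are -0, so min(-0, -0) would incorrectly yield +0. The max branch instead adds the operands, since +0 + (-0) is +0. The following is a minimal host-side sketch of that reasoning, not V8 code; the function names MinOfEqualZeros and MaxOfEqualZeros are made up for illustration.

// Sketch of the signed-zero trick the emitted x87 code relies on.
// Assumes both inputs already compared equal, i.e. they are +0.0f / -0.0f.
#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// min(+0, -0) and min(-0, -0) must both be -0: OR-ing the bit patterns
// keeps the sign bit if either operand has it. XOR would clear it when
// both operands are -0, which is the bug the or_ change fixes.
static float MinOfEqualZeros(float left, float right) {
  uint32_t l, r;
  std::memcpy(&l, &left, sizeof(l));
  std::memcpy(&r, &right, sizeof(r));
  uint32_t bits = l | r;  // or_, not xor_
  float result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

// max(+0, -0) must be +0: the sum is +0 unless both operands are -0,
// which mirrors the fadd in the else-branch of the hunk above.
static float MaxOfEqualZeros(float left, float right) {
  return left + right;
}

int main() {
  assert(std::signbit(MinOfEqualZeros(0.0f, -0.0f)));    // min(+0, -0) == -0
  assert(std::signbit(MinOfEqualZeros(-0.0f, -0.0f)));   // min(-0, -0) == -0
  assert(!std::signbit(MaxOfEqualZeros(0.0f, -0.0f)));   // max(+0, -0) == +0
  assert(std::signbit(MaxOfEqualZeros(-0.0f, -0.0f)));   // max(-0, -0) == -0
  return 0;
}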