OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/crankshaft/x64/lithium-codegen-x64.h" | 7 #include "src/crankshaft/x64/lithium-codegen-x64.h" |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 3249 matching lines...) |
3260 } | 3260 } |
3261 | 3261 |
3262 | 3262 |
3263 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3263 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3264 Register input_reg = ToRegister(instr->value()); | 3264 Register input_reg = ToRegister(instr->value()); |
3265 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 3265 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
3266 Heap::kHeapNumberMapRootIndex); | 3266 Heap::kHeapNumberMapRootIndex); |
3267 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3267 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); |
3268 | 3268 |
3269 Label slow, allocated, done; | 3269 Label slow, allocated, done; |
3270 Register tmp = input_reg.is(rax) ? rcx : rax; | 3270 uint32_t available_regs = rax.bit() | rcx.bit() | rdx.bit() | rbx.bit(); |
3271 Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx; | 3271 available_regs &= ~input_reg.bit(); |
| 3272 if (instr->context()->IsRegister()) { |
| 3273 // Make sure that the context isn't overwritten in the AllocateHeapNumber |
| 3274 // macro below. |
| 3275 available_regs &= ~ToRegister(instr->context()).bit(); |
| 3276 } |
| 3277 |
| 3278 Register tmp = |
| 3279 Register::from_code(base::bits::CountTrailingZeros32(available_regs)); |
| 3280 available_regs &= ~tmp.bit(); |
| 3281 Register tmp2 = |
| 3282 Register::from_code(base::bits::CountTrailingZeros32(available_regs)); |
3272 | 3283 |
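The new register-selection code above replaces the hard-coded rax/rcx/rdx choice with a bitmask of candidate registers minus the input register and (when it lives in a register) the context, then takes the lowest remaining bit for each temp. Below is a minimal standalone sketch of that idiom, assuming the standard x64 register codes (rax=0, rcx=1, rdx=2, rbx=3) and with base::bits::CountTrailingZeros32 reimplemented locally for illustration; it is not V8 code.

#include <cstdint>
#include <iostream>

// Same contract as base::bits::CountTrailingZeros32 for non-zero input.
static int CountTrailingZeros32(uint32_t value) {
  int count = 0;
  while ((value & 1u) == 0u) {
    value >>= 1;
    ++count;
  }
  return count;
}

int main() {
  const uint32_t rax = 1u << 0, rcx = 1u << 1, rdx = 1u << 2, rbx = 1u << 3;
  uint32_t available_regs = rax | rcx | rdx | rbx;
  available_regs &= ~rcx;  // suppose input_reg is rcx
  available_regs &= ~rdx;  // suppose the context lives in rdx
  int tmp = CountTrailingZeros32(available_regs);    // 0, i.e. rax
  available_regs &= ~(1u << tmp);
  int tmp2 = CountTrailingZeros32(available_regs);   // 3, i.e. rbx
  std::cout << "tmp=" << tmp << " tmp2=" << tmp2 << "\n";
  return 0;
}

The old nested-ternary choice never excluded the context register, so tmp or tmp2 could clobber it before the AllocateHeapNumber call; the mask-based selection guarantees both temps are distinct from each other, the input, and the context.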
3273 // Preserve the value of all registers. | 3284 // Preserve the value of all registers. |
3274 PushSafepointRegistersScope scope(this); | 3285 PushSafepointRegistersScope scope(this); |
3275 | 3286 |
3276 __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 3287 __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
3277 // Check the sign of the argument. If the argument is positive, just | 3288 // Check the sign of the argument. If the argument is positive, just |
3278 // return it. We do not need to patch the stack since |input| and | 3289 // return it. We do not need to patch the stack since |input| and |
3279 // |result| are the same register and |input| will be restored | 3290 // |result| are the same register and |input| will be restored |
3280 // unchanged by popping safepoint registers. | 3291 // unchanged by popping safepoint registers. |
3281 __ testl(tmp, Immediate(HeapNumber::kSignMask)); | 3292 __ testl(tmp, Immediate(HeapNumber::kSignMask)); |
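The sign test above reads only the upper 32 bits of the heap number's double payload. A short sketch of the same check on a plain double, under the assumption that HeapNumber::kExponentOffset addresses the high word of a little-endian IEEE 754 double and that kSignMask is bit 31 of that word (0x80000000); again a standalone illustration, not V8 code.

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

int main() {
  const uint32_t kSignMask = 0x80000000u;  // assumption: mirrors HeapNumber::kSignMask
  const double values[] = {3.5, -3.5, -0.0};
  for (double v : values) {
    uint64_t bits;
    std::memcpy(&bits, &v, sizeof bits);
    uint32_t high_word = static_cast<uint32_t>(bits >> 32);  // the "exponent" word
    bool negative = (high_word & kSignMask) != 0;
    assert(negative == std::signbit(v));
  }
  return 0;
}

If the bit is clear the value is already non-negative and, as the comment notes, the deferred code can return the input unchanged; only the negative path continues on to allocate a fresh heap number.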
(...skipping 2332 matching lines...) |
5614 __ bind(deferred->exit()); | 5625 __ bind(deferred->exit()); |
5615 __ bind(&done); | 5626 __ bind(&done); |
5616 } | 5627 } |
5617 | 5628 |
5618 #undef __ | 5629 #undef __ |
5619 | 5630 |
5620 } // namespace internal | 5631 } // namespace internal |
5621 } // namespace v8 | 5632 } // namespace v8 |
5622 | 5633 |
5623 #endif // V8_TARGET_ARCH_X64 | 5634 #endif // V8_TARGET_ARCH_X64 |