Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 518 matching lines...) | |
| 529 __ Push(descriptor->register_params_[i]); | 529 __ Push(descriptor->register_params_[i]); |
| 530 } | 530 } |
| 531 ExternalReference miss = descriptor->miss_handler(); | 531 ExternalReference miss = descriptor->miss_handler(); |
| 532 __ CallExternalReference(miss, descriptor->register_param_count_); | 532 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 533 } | 533 } |
| 534 | 534 |
| 535 __ Ret(); | 535 __ Ret(); |
| 536 } | 536 } |
| 537 | 537 |
| 538 | 538 |
| 539 void DoubleToIStub::Generate(MacroAssembler* masm) { | |
|
jbramley
2014/02/13 14:20:14
It seems that this stub doesn't always truncate on
rmcilroy
2014/02/14 12:45:07
Done (also on arm). Looks like the is_truncating()
jbramley
2014/02/14 12:57:38
If we do, we should probably rename the stub too,
| |
| 540 Label done; | |
| 541 Register input = source(); | |
| 542 Register result = destination(); | |
| 543 | |
| 544 ASSERT(result.Is64Bits()); | |
| 545 ASSERT(jssp.Is(masm->StackPointer())); | |
| 546 | |
| 547 int double_offset = offset(); | |
| 548 // Account for saved regs if input is sp. | |
| 549 if (input.is(jssp)) double_offset += 2 * kPointerSize; | |
|
jbramley
2014/02/13 14:20:14
Do this near Push(scratch1, scratch2) so it's clea
rmcilroy
2014/02/14 12:45:07
Done.
| |
| 550 | |
| 551 DoubleRegister double_scratch = d0; // only used if !skip_fastpath() | |
|
jbramley
2014/02/13 14:20:14
In that case, put it in the !skip_fastpath() scope
rmcilroy
2014/02/14 12:45:07
I can't - I need to Pop it at the end of the funct
| |
| 552 Register scratch1 = GetRegisterThatIsNotOneOf(input, result); | |
| 553 Register scratch2 = | |
| 554 GetRegisterThatIsNotOneOf(input, result, scratch1); | |
| 555 | |
| 556 __ Push(scratch1, scratch2); | |
|
jbramley
2014/02/13 14:20:14
This is probably the only stub that preserves its
rmcilroy
2014/02/14 12:45:07
I'm not sure what you mean here? What would you me
jbramley
2014/02/14 12:57:38
At the moment, MarkAsCall preserves every live val
rmcilroy
2014/02/14 13:44:18
Ok, done. Just FYI - I expect we will be adding m
| |
| 557 | |
| 558 if (!skip_fastpath()) { | |
| 559 // Try to convert with an FPU convert instruction. This handles all | |
| 560 // non-saturating cases. | |
| 561 __ Push(double_scratch); | |
|
jbramley
2014/02/13 14:20:14
If you could combine this with the previous Push,
rmcilroy
2014/02/14 12:45:07
I can't - Push() barfs that the registers are of d
jbramley
2014/02/14 12:57:38
Ah, yes, sorry.
| |
| 562 if (input.is(jssp)) double_offset += 1 * kDoubleSize; | |
| 563 __ Ldr(double_scratch, MemOperand(input, double_offset)); | |
| 564 __ TryInlineTruncateDoubleToI(result, double_scratch, &done); | |
| 565 __ Fmov(result, double_scratch); | |
| 566 } else { | |
| 567 __ Ldr(result, MemOperand(input, double_offset)); | |
| 568 } | |
| 569 | |
| 570 // If we reach here we need to manually convert the input to an int32. | |
| 571 | |
| 572 // Extract the exponent. | |
| 573 Register exponent = scratch1; | |
| 574 __ Ubfx(exponent, result, HeapNumber::kMantissaBits, | |
| 575 HeapNumber::kExponentBits); | |
| 576 | |
| 577 // If the exponent is >= 84 (kMantissaBits + 32), the result is always 0 since | |
| 578 // the mantissa gets shifted completely out of the int32_t result. | |
| 579 __ Cmp(exponent, HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 32); | |
| 580 __ CzeroX(result, ge); | |
| 581 __ B(ge, &done); | |
| 582 | |
| 583 // The Fcvtzs sequence handles all cases except where the conversion causes | |
| 584 // signed overflow in the int64_t target. Since we've already handled | |
| 585 // exponents >= 84, we can guarantee that 63 <= exponent < 84. | |
| 586 | |
| 587 if (masm->emit_debug_code()) { | |
| 588 __ Cmp(exponent, HeapNumber::kExponentBias + 63); | |
| 589 // Exponents less than this should have been handled by the Fcvt case. | |
| 590 __ Check(ge, kUnexpectedValue); | |
| 591 } | |
| 592 | |
| 593 // Isolate the mantissa bits, and set the implicit '1'. | |
| 594 Register mantissa = scratch2; | |
| 595 __ Ubfx(mantissa, result, 0, HeapNumber::kMantissaBits); | |
| 596 __ Orr(mantissa, mantissa, 1UL << HeapNumber::kMantissaBits); | |
| 597 | |
| 598 // Negate the mantissa if necessary. | |
| 599 __ Tst(result, kXSignMask); | |
| 600 __ Cneg(mantissa, mantissa, ne); | |
| 601 | |
| 602 // Shift the mantissa bits in the correct place. We know that we have to shift | |
| 603 // it left here, because exponent >= 63 >= kMantissaBits. | |
| 604 __ Sub(exponent, exponent, | |
| 605 HeapNumber::kExponentBias + HeapNumber::kMantissaBits); | |
| 606 __ Lsl(result, mantissa, exponent); | |
| 607 | |
| 608 __ Bind(&done); | |
| 609 if (!skip_fastpath()) { | |
| 610 __ Pop(double_scratch); | |
| 611 } | |
| 612 __ Pop(scratch2, scratch1); | |
| 613 __ Ret(); | |
| 614 } | |
| 615 | |
| 616 | |
| 539 // See call site for description. | 617 // See call site for description. |
| 540 static void EmitIdenticalObjectComparison(MacroAssembler* masm, | 618 static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
| 541 Register left, | 619 Register left, |
| 542 Register right, | 620 Register right, |
| 543 Register scratch, | 621 Register scratch, |
| 544 FPRegister double_scratch, | 622 FPRegister double_scratch, |
| 545 Label* slow, | 623 Label* slow, |
| 546 Condition cond) { | 624 Condition cond) { |
| 547 ASSERT(!AreAliased(left, right, scratch)); | 625 ASSERT(!AreAliased(left, right, scratch)); |
| 548 Label not_identical, return_equal, heap_number; | 626 Label not_identical, return_equal, heap_number; |
| (...skipping 5163 matching lines...) | |
| 5712 MemOperand(fp, 6 * kPointerSize), | 5790 MemOperand(fp, 6 * kPointerSize), |
| 5713 NULL); | 5791 NULL); |
| 5714 } | 5792 } |
| 5715 | 5793 |
| 5716 | 5794 |
| 5717 #undef __ | 5795 #undef __ |
| 5718 | 5796 |
| 5719 } } // namespace v8::internal | 5797 } } // namespace v8::internal |
| 5720 | 5798 |
| 5721 #endif // V8_TARGET_ARCH_A64 | 5799 #endif // V8_TARGET_ARCH_A64 |
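
The new `DoubleToIStub::Generate` above splits the conversion into an FPU fast path (`TryInlineTruncateDoubleToI`) and a manual slow path that, per the code's own comments, only runs when the conversion would overflow the signed 64-bit target, i.e. when the unbiased exponent is at least 63. As a reading aid, here is a rough C++ model of what that slow path computes. It is only a sketch under that precondition; the helper name, the explicit IEEE-754 constants, and the `std::memcpy` bit-reinterpretation are illustrative and not taken from V8.

```cpp
#include <cstdint>
#include <cstring>

// Reference model of the stub's manual slow path (illustrative names, not V8's).
// Precondition: the unbiased exponent of the input is >= 63 (or the value is
// non-finite), matching the debug Check in the stub.
int32_t DoubleToInt32SlowPath(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));  // reinterpret the IEEE-754 bits

  const int kMantissaBits = 52;
  const int kExponentBits = 11;
  const int kExponentBias = 1023;

  // Extract the biased exponent field (the Ubfx in the stub).
  int exponent =
      static_cast<int>((bits >> kMantissaBits) & ((1u << kExponentBits) - 1));

  // Exponent >= bias + 84: every mantissa bit lands above bit 31, so the low
  // 32 bits are zero. This also covers infinities and NaNs, whose exponent
  // field is all ones (the Cmp / CzeroX / B sequence in the stub).
  if (exponent >= kExponentBias + kMantissaBits + 32) return 0;

  // Isolate the mantissa and set the implicit leading 1 (Ubfx + Orr).
  int64_t mantissa =
      static_cast<int64_t>(bits & ((uint64_t{1} << kMantissaBits) - 1));
  mantissa |= int64_t{1} << kMantissaBits;

  // Negate if the sign bit is set (Tst + Cneg).
  if (bits >> 63) mantissa = -mantissa;

  // With exponent >= bias + 63 the mantissa always has to move left (Sub + Lsl);
  // the shift amount ends up in [11, 31].
  int shift = exponent - (kExponentBias + kMantissaBits);
  uint64_t shifted = static_cast<uint64_t>(mantissa) << shift;

  // Only the low 32 bits survive, i.e. the result is taken modulo 2^32.
  return static_cast<int32_t>(static_cast<uint32_t>(shifted));
}
```

Under these semantics an out-of-range double is reduced modulo 2^32 rather than saturated, so for example 2^63 and every non-finite value end up as 0; this is the kind of truncating behaviour the `is_truncating()` exchange in the review comments above refers to.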