| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 578 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 589 ASSERT(!dst.is(kScratchRegister)); | 589 ASSERT(!dst.is(kScratchRegister)); |
| 590 if (src->value() == 0) { | 590 if (src->value() == 0) { |
| 591 testq(dst, dst); | 591 testq(dst, dst); |
| 592 } else { | 592 } else { |
| 593 Move(kScratchRegister, src); | 593 Move(kScratchRegister, src); |
| 594 cmpq(dst, kScratchRegister); | 594 cmpq(dst, kScratchRegister); |
| 595 } | 595 } |
| 596 } | 596 } |
| 597 | 597 |
| 598 | 598 |
| 599 void MacroAssembler::SmiCompare(Register dst, const Operand& src) { |
| 600 cmpq(dst, src); |
| 601 } |
| 602 |
| 603 |
| 599 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { | 604 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { |
| 600 cmpq(dst, src); | 605 cmpq(dst, src); |
| 601 } | 606 } |
| 602 | 607 |
| 603 | 608 |
| 604 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { | 609 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { |
| 605 if (src->value() == 0) { | 610 if (src->value() == 0) { |
| 606 // Only tagged long smi to have 32-bit representation. | 611 // Only tagged long smi to have 32-bit representation. |
| 607 cmpq(dst, Immediate(0)); | 612 cmpq(dst, Immediate(0)); |
| 608 } else { | 613 } else { |
| (...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 724 j(not_equal, on_smi_result); | 729 j(not_equal, on_smi_result); |
| 725 } | 730 } |
| 726 } | 731 } |
| 727 | 732 |
| 728 | 733 |
| 729 void MacroAssembler::SmiAdd(Register dst, | 734 void MacroAssembler::SmiAdd(Register dst, |
| 730 Register src1, | 735 Register src1, |
| 731 Register src2, | 736 Register src2, |
| 732 Label* on_not_smi_result) { | 737 Label* on_not_smi_result) { |
| 733 ASSERT(!dst.is(src2)); | 738 ASSERT(!dst.is(src2)); |
| 734 if (dst.is(src1)) { | 739 if (on_not_smi_result == NULL) { |
| 740 // No overflow checking. Use only when it's known that |
| 741 // overflowing is impossible. |
| 742 if (dst.is(src1)) { |
| 743 addq(dst, src2); |
| 744 } else { |
| 745 movq(dst, src1); |
| 746 addq(dst, src2); |
| 747 } |
| 748 Assert(no_overflow, "Smi addition overflow"); |
| 749 } else if (dst.is(src1)) { |
| 735 addq(dst, src2); | 750 addq(dst, src2); |
| 736 Label smi_result; | 751 Label smi_result; |
| 737 j(no_overflow, &smi_result); | 752 j(no_overflow, &smi_result); |
| 738 // Restore src1. | 753 // Restore src1. |
| 739 subq(src1, src2); | 754 subq(src1, src2); |
| 740 jmp(on_not_smi_result); | 755 jmp(on_not_smi_result); |
| 741 bind(&smi_result); | 756 bind(&smi_result); |
| 742 } else { | 757 } else { |
| 743 movq(dst, src1); | 758 movq(dst, src1); |
| 744 addq(dst, src2); | 759 addq(dst, src2); |
| (...skipping 26 matching lines...) Expand all Loading... |
| 771 jmp(on_not_smi_result); | 786 jmp(on_not_smi_result); |
| 772 bind(&smi_result); | 787 bind(&smi_result); |
| 773 } else { | 788 } else { |
| 774 movq(dst, src1); | 789 movq(dst, src1); |
| 775 subq(dst, src2); | 790 subq(dst, src2); |
| 776 j(overflow, on_not_smi_result); | 791 j(overflow, on_not_smi_result); |
| 777 } | 792 } |
| 778 } | 793 } |
| 779 | 794 |
| 780 | 795 |
| 796 void MacroAssembler::SmiSub(Register dst, |
| 797 Register src1, |
| 798 const Operand& src2, |
| 799 Label* on_not_smi_result) { |
| 800 if (on_not_smi_result == NULL) { |
| 801 // No overflow checking. Use only when it's known that |
| 802 // overflowing is impossible (e.g., subtracting two positive smis). |
| 803 if (dst.is(src1)) { |
| 804 subq(dst, src2); |
| 805 } else { |
| 806 movq(dst, src1); |
| 807 subq(dst, src2); |
| 808 } |
| 809 Assert(no_overflow, "Smi subtraction overflow"); |
| 810 } else if (dst.is(src1)) { |
| 811 subq(dst, src2); |
| 812 Label smi_result; |
| 813 j(no_overflow, &smi_result); |
| 814 // Restore src1. |
| 815 addq(src1, src2); |
| 816 jmp(on_not_smi_result); |
| 817 bind(&smi_result); |
| 818 } else { |
| 819 movq(dst, src1); |
| 820 subq(dst, src2); |
| 821 j(overflow, on_not_smi_result); |
| 822 } |
| 823 } |
| 824 |
| 781 void MacroAssembler::SmiMul(Register dst, | 825 void MacroAssembler::SmiMul(Register dst, |
| 782 Register src1, | 826 Register src1, |
| 783 Register src2, | 827 Register src2, |
| 784 Label* on_not_smi_result) { | 828 Label* on_not_smi_result) { |
| 785 ASSERT(!dst.is(src2)); | 829 ASSERT(!dst.is(src2)); |
| 786 ASSERT(!dst.is(kScratchRegister)); | 830 ASSERT(!dst.is(kScratchRegister)); |
| 787 ASSERT(!src1.is(kScratchRegister)); | 831 ASSERT(!src1.is(kScratchRegister)); |
| 788 ASSERT(!src2.is(kScratchRegister)); | 832 ASSERT(!src2.is(kScratchRegister)); |
| 789 | 833 |
| 790 if (dst.is(src1)) { | 834 if (dst.is(src1)) { |
| (...skipping 1728 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2519 | 2563 |
| 2520 | 2564 |
| 2521 void MacroAssembler::AllocateTwoByteString(Register result, | 2565 void MacroAssembler::AllocateTwoByteString(Register result, |
| 2522 Register length, | 2566 Register length, |
| 2523 Register scratch1, | 2567 Register scratch1, |
| 2524 Register scratch2, | 2568 Register scratch2, |
| 2525 Register scratch3, | 2569 Register scratch3, |
| 2526 Label* gc_required) { | 2570 Label* gc_required) { |
| 2527 // Calculate the number of bytes needed for the characters in the string while | 2571 // Calculate the number of bytes needed for the characters in the string while |
| 2528 // observing object alignment. | 2572 // observing object alignment. |
| 2529 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); | 2573 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize & |
| 2574 kObjectAlignmentMask; |
| 2530 ASSERT(kShortSize == 2); | 2575 ASSERT(kShortSize == 2); |
| 2531 // scratch1 = length * 2 + kObjectAlignmentMask. | 2576 // scratch1 = length * 2 + kObjectAlignmentMask. |
| 2532 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask)); | 2577 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask + |
| 2578 kHeaderAlignment)); |
| 2533 and_(scratch1, Immediate(~kObjectAlignmentMask)); | 2579 and_(scratch1, Immediate(~kObjectAlignmentMask)); |
| 2580 if (kHeaderAlignment > 0) { |
| 2581 subq(scratch1, Immediate(kHeaderAlignment)); |
| 2582 } |
| 2534 | 2583 |
| 2535 // Allocate two byte string in new space. | 2584 // Allocate two byte string in new space. |
| 2536 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, | 2585 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, |
| 2537 times_1, | 2586 times_1, |
| 2538 scratch1, | 2587 scratch1, |
| 2539 result, | 2588 result, |
| 2540 scratch2, | 2589 scratch2, |
| 2541 scratch3, | 2590 scratch3, |
| 2542 gc_required, | 2591 gc_required, |
| 2543 TAG_OBJECT); | 2592 TAG_OBJECT); |
| 2544 | 2593 |
| 2545 // Set the map, length and hash field. | 2594 // Set the map, length and hash field. |
| 2546 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex); | 2595 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex); |
| 2547 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 2596 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 2548 movl(FieldOperand(result, String::kLengthOffset), length); | 2597 Integer32ToSmi(scratch1, length); |
| 2598 movq(FieldOperand(result, String::kLengthOffset), scratch1); |
| 2549 movl(FieldOperand(result, String::kHashFieldOffset), | 2599 movl(FieldOperand(result, String::kHashFieldOffset), |
| 2550 Immediate(String::kEmptyHashField)); | 2600 Immediate(String::kEmptyHashField)); |
| 2551 } | 2601 } |
| 2552 | 2602 |
| 2553 | 2603 |
| 2554 void MacroAssembler::AllocateAsciiString(Register result, | 2604 void MacroAssembler::AllocateAsciiString(Register result, |
| 2555 Register length, | 2605 Register length, |
| 2556 Register scratch1, | 2606 Register scratch1, |
| 2557 Register scratch2, | 2607 Register scratch2, |
| 2558 Register scratch3, | 2608 Register scratch3, |
| 2559 Label* gc_required) { | 2609 Label* gc_required) { |
| 2560 // Calculate the number of bytes needed for the characters in the string while | 2610 // Calculate the number of bytes needed for the characters in the string while |
| 2561 // observing object alignment. | 2611 // observing object alignment. |
| 2562 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0); | 2612 const int kHeaderAlignment = SeqAsciiString::kHeaderSize & |
| 2613 kObjectAlignmentMask; |
| 2563 movl(scratch1, length); | 2614 movl(scratch1, length); |
| 2564 ASSERT(kCharSize == 1); | 2615 ASSERT(kCharSize == 1); |
| 2565 addq(scratch1, Immediate(kObjectAlignmentMask)); | 2616 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment)); |
| 2566 and_(scratch1, Immediate(~kObjectAlignmentMask)); | 2617 and_(scratch1, Immediate(~kObjectAlignmentMask)); |
| 2618 if (kHeaderAlignment > 0) { |
| 2619 subq(scratch1, Immediate(kHeaderAlignment)); |
| 2620 } |
| 2567 | 2621 |
| 2568 // Allocate ascii string in new space. | 2622 // Allocate ascii string in new space. |
| 2569 AllocateInNewSpace(SeqAsciiString::kHeaderSize, | 2623 AllocateInNewSpace(SeqAsciiString::kHeaderSize, |
| 2570 times_1, | 2624 times_1, |
| 2571 scratch1, | 2625 scratch1, |
| 2572 result, | 2626 result, |
| 2573 scratch2, | 2627 scratch2, |
| 2574 scratch3, | 2628 scratch3, |
| 2575 gc_required, | 2629 gc_required, |
| 2576 TAG_OBJECT); | 2630 TAG_OBJECT); |
| 2577 | 2631 |
| 2578 // Set the map, length and hash field. | 2632 // Set the map, length and hash field. |
| 2579 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex); | 2633 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex); |
| 2580 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); | 2634 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); |
| 2581 movl(FieldOperand(result, String::kLengthOffset), length); | 2635 Integer32ToSmi(scratch1, length); |
| 2636 movq(FieldOperand(result, String::kLengthOffset), scratch1); |
| 2582 movl(FieldOperand(result, String::kHashFieldOffset), | 2637 movl(FieldOperand(result, String::kHashFieldOffset), |
| 2583 Immediate(String::kEmptyHashField)); | 2638 Immediate(String::kEmptyHashField)); |
| 2584 } | 2639 } |
| 2585 | 2640 |
| 2586 | 2641 |
| 2587 void MacroAssembler::AllocateConsString(Register result, | 2642 void MacroAssembler::AllocateConsString(Register result, |
| 2588 Register scratch1, | 2643 Register scratch1, |
| 2589 Register scratch2, | 2644 Register scratch2, |
| 2590 Label* gc_required) { | 2645 Label* gc_required) { |
| 2591 // Allocate heap number in new space. | 2646 // Allocate heap number in new space. |
| (...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2701 CodePatcher::~CodePatcher() { | 2756 CodePatcher::~CodePatcher() { |
| 2702 // Indicate that code has changed. | 2757 // Indicate that code has changed. |
| 2703 CPU::FlushICache(address_, size_); | 2758 CPU::FlushICache(address_, size_); |
| 2704 | 2759 |
| 2705 // Check that the code was patched as expected. | 2760 // Check that the code was patched as expected. |
| 2706 ASSERT(masm_.pc_ == address_ + size_); | 2761 ASSERT(masm_.pc_ == address_ + size_); |
| 2707 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2762 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2708 } | 2763 } |
| 2709 | 2764 |
| 2710 } } // namespace v8::internal | 2765 } } // namespace v8::internal |
| OLD | NEW |