| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 587 matching lines...) |
| 598 case Token::BIT_NOT: | 598 case Token::BIT_NOT: |
| 599 GenerateSmiStubBitNot(masm); | 599 GenerateSmiStubBitNot(masm); |
| 600 break; | 600 break; |
| 601 default: | 601 default: |
| 602 UNREACHABLE(); | 602 UNREACHABLE(); |
| 603 } | 603 } |
| 604 } | 604 } |
| 605 | 605 |
| 606 | 606 |
| 607 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | 607 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
| 608 NearLabel non_smi; | 608 Label non_smi, undo, slow; |
| 609 Label undo, slow; | 609 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, |
| 610 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow); | 610 Label::kNear, Label::kNear, Label::kNear); |
| 611 __ bind(&undo); | 611 __ bind(&undo); |
| 612 GenerateSmiCodeUndo(masm); | 612 GenerateSmiCodeUndo(masm); |
| 613 __ bind(&non_smi); | 613 __ bind(&non_smi); |
| 614 __ bind(&slow); | 614 __ bind(&slow); |
| 615 GenerateTypeTransition(masm); | 615 GenerateTypeTransition(masm); |
| 616 } | 616 } |
| 617 | 617 |
| 618 | 618 |
| 619 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | 619 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
| 620 NearLabel non_smi; | 620 Label non_smi; |
| 621 GenerateSmiCodeBitNot(masm, &non_smi); | 621 GenerateSmiCodeBitNot(masm, &non_smi); |
| 622 __ bind(&non_smi); | 622 __ bind(&non_smi); |
| 623 GenerateTypeTransition(masm); | 623 GenerateTypeTransition(masm); |
| 624 } | 624 } |
| 625 | 625 |
| 626 | 626 |
| 627 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | 627 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub( |
| 628 NearLabel* non_smi, | 628 MacroAssembler* masm, Label* non_smi, Label* undo, Label* slow, |
| 629 Label* undo, | 629 Label::Distance non_smi_near, Label::Distance undo_near, |
| 630 Label* slow) { | 630 Label::Distance slow_near) { |
| 631 // Check whether the value is a smi. | 631 // Check whether the value is a smi. |
| 632 __ test(eax, Immediate(kSmiTagMask)); | 632 __ test(eax, Immediate(kSmiTagMask)); |
| 633 __ j(not_zero, non_smi); | 633 __ j(not_zero, non_smi, non_smi_near); |
| 634 | 634 |
| 635 // We can't handle -0 with smis, so use a type transition for that case. | 635 // We can't handle -0 with smis, so use a type transition for that case. |
| 636 __ test(eax, Operand(eax)); | 636 __ test(eax, Operand(eax)); |
| 637 __ j(zero, slow); | 637 __ j(zero, slow, slow_near); |
| 638 | 638 |
| 639 // Try optimistic subtraction '0 - value', saving operand in eax for undo. | 639 // Try optimistic subtraction '0 - value', saving operand in eax for undo. |
| 640 __ mov(edx, Operand(eax)); | 640 __ mov(edx, Operand(eax)); |
| 641 __ Set(eax, Immediate(0)); | 641 __ Set(eax, Immediate(0)); |
| 642 __ sub(eax, Operand(edx)); | 642 __ sub(eax, Operand(edx)); |
| 643 __ j(overflow, undo); | 643 __ j(overflow, undo, undo_near); |
| 644 __ ret(0); | 644 __ ret(0); |
| 645 } | 645 } |
| 646 | 646 |
| 647 | 647 |
| 648 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, | 648 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot( |
| 649 NearLabel* non_smi) { | 649 MacroAssembler* masm, |
| | 650 Label* non_smi, |
| | 651 Label::Distance non_smi_near) { |
| 650 // Check whether the value is a smi. | 652 // Check whether the value is a smi. |
| 651 __ test(eax, Immediate(kSmiTagMask)); | 653 __ test(eax, Immediate(kSmiTagMask)); |
| 652 __ j(not_zero, non_smi); | 654 __ j(not_zero, non_smi, non_smi_near); |
| 653 | 655 |
| 654 // Flip bits and revert inverted smi-tag. | 656 // Flip bits and revert inverted smi-tag. |
| 655 __ not_(eax); | 657 __ not_(eax); |
| 656 __ and_(eax, ~kSmiTagMask); | 658 __ and_(eax, ~kSmiTagMask); |
| 657 __ ret(0); | 659 __ ret(0); |
| 658 } | 660 } |
| 659 | 661 |
| 660 | 662 |
| 661 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { | 663 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { |
| 662 __ mov(eax, Operand(edx)); | 664 __ mov(eax, Operand(edx)); |
| 663 } | 665 } |
| 664 | 666 |
| 665 | 667 |
| 666 // TODO(svenpanne): Use virtual functions instead of switch. | 668 // TODO(svenpanne): Use virtual functions instead of switch. |
| 667 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 669 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 668 switch (op_) { | 670 switch (op_) { |
| 669 case Token::SUB: | 671 case Token::SUB: |
| 670 GenerateHeapNumberStubSub(masm); | 672 GenerateHeapNumberStubSub(masm); |
| 671 break; | 673 break; |
| 672 case Token::BIT_NOT: | 674 case Token::BIT_NOT: |
| 673 GenerateHeapNumberStubBitNot(masm); | 675 GenerateHeapNumberStubBitNot(masm); |
| 674 break; | 676 break; |
| 675 default: | 677 default: |
| 676 UNREACHABLE(); | 678 UNREACHABLE(); |
| 677 } | 679 } |
| 678 } | 680 } |
| 679 | 681 |
| 680 | 682 |
| 681 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 683 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
| 682 NearLabel non_smi; | 684 Label non_smi, undo, slow; |
| 683 Label undo, slow; | 685 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear); |
| 684 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow); | |
| 685 __ bind(&non_smi); | 686 __ bind(&non_smi); |
| 686 GenerateHeapNumberCodeSub(masm, &slow); | 687 GenerateHeapNumberCodeSub(masm, &slow); |
| 687 __ bind(&undo); | 688 __ bind(&undo); |
| 688 GenerateSmiCodeUndo(masm); | 689 GenerateSmiCodeUndo(masm); |
| 689 __ bind(&slow); | 690 __ bind(&slow); |
| 690 GenerateTypeTransition(masm); | 691 GenerateTypeTransition(masm); |
| 691 } | 692 } |
| 692 | 693 |
| 693 | 694 |
| 694 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 695 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
| 695 MacroAssembler* masm) { | 696 MacroAssembler* masm) { |
| 696 NearLabel non_smi; | 697 Label non_smi, slow; |
| 697 Label slow; | 698 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 698 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 699 __ bind(&non_smi); | 699 __ bind(&non_smi); |
| 700 GenerateHeapNumberCodeBitNot(masm, &slow); | 700 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 701 __ bind(&slow); | 701 __ bind(&slow); |
| 702 GenerateTypeTransition(masm); | 702 GenerateTypeTransition(masm); |
| 703 } | 703 } |
| 704 | 704 |
| 705 | 705 |
| 706 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 706 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
| 707 Label* slow) { | 707 Label* slow) { |
| 708 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); | 708 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); |
| (...skipping 91 matching lines...) |
| 800 case Token::BIT_NOT: | 800 case Token::BIT_NOT: |
| 801 GenerateGenericStubBitNot(masm); | 801 GenerateGenericStubBitNot(masm); |
| 802 break; | 802 break; |
| 803 default: | 803 default: |
| 804 UNREACHABLE(); | 804 UNREACHABLE(); |
| 805 } | 805 } |
| 806 } | 806 } |
| 807 | 807 |
| 808 | 808 |
| 809 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | 809 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
| 810 NearLabel non_smi; | 810 Label non_smi, undo, slow; |
| 811 Label undo, slow; | 811 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear); |
| 812 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow); | |
| 813 __ bind(&non_smi); | 812 __ bind(&non_smi); |
| 814 GenerateHeapNumberCodeSub(masm, &slow); | 813 GenerateHeapNumberCodeSub(masm, &slow); |
| 815 __ bind(&undo); | 814 __ bind(&undo); |
| 816 GenerateSmiCodeUndo(masm); | 815 GenerateSmiCodeUndo(masm); |
| 817 __ bind(&slow); | 816 __ bind(&slow); |
| 818 GenerateGenericCodeFallback(masm); | 817 GenerateGenericCodeFallback(masm); |
| 819 } | 818 } |
| 820 | 819 |
| 821 | 820 |
| 822 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | 821 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
| 823 NearLabel non_smi; | 822 Label non_smi, slow; |
| 824 Label slow; | 823 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
| 825 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 826 __ bind(&non_smi); | 824 __ bind(&non_smi); |
| 827 GenerateHeapNumberCodeBitNot(masm, &slow); | 825 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 828 __ bind(&slow); | 826 __ bind(&slow); |
| 829 GenerateGenericCodeFallback(masm); | 827 GenerateGenericCodeFallback(masm); |
| 830 } | 828 } |
| 831 | 829 |
| 832 | 830 |
| 833 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | 831 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( |
| 834 MacroAssembler* masm) { | 832 MacroAssembler* masm) { |
| 835 // Handle the slow case by jumping to the corresponding JavaScript builtin. | 833 // Handle the slow case by jumping to the corresponding JavaScript builtin. |
| (...skipping 4777 matching lines...) |
| 5613 | 5611 |
| 5614 | 5612 |
| 5615 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, | 5613 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, |
| 5616 Register left, | 5614 Register left, |
| 5617 Register right, | 5615 Register right, |
| 5618 Register scratch1, | 5616 Register scratch1, |
| 5619 Register scratch2) { | 5617 Register scratch2) { |
| 5620 Register length = scratch1; | 5618 Register length = scratch1; |
| 5621 | 5619 |
| 5622 // Compare lengths. | 5620 // Compare lengths. |
| 5623 NearLabel strings_not_equal; | 5621 Label strings_not_equal, check_zero_length; |
| 5624 Label check_zero_length; | |
| 5625 __ mov(length, FieldOperand(left, String::kLengthOffset)); | 5622 __ mov(length, FieldOperand(left, String::kLengthOffset)); |
| 5626 __ cmp(length, FieldOperand(right, String::kLengthOffset)); | 5623 __ cmp(length, FieldOperand(right, String::kLengthOffset)); |
| 5627 __ j(equal, &check_zero_length, Label::kNear); | 5624 __ j(equal, &check_zero_length, Label::kNear); |
| 5628 __ bind(&strings_not_equal); | 5625 __ bind(&strings_not_equal); |
| 5629 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL))); | 5626 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL))); |
| 5630 __ ret(0); | 5627 __ ret(0); |
| 5631 | 5628 |
| 5632 // Check if the length is zero. | 5629 // Check if the length is zero. |
| 5633 Label compare_chars; | 5630 Label compare_chars; |
| 5634 __ bind(&check_zero_length); | 5631 __ bind(&check_zero_length); |
| 5635 STATIC_ASSERT(kSmiTag == 0); | 5632 STATIC_ASSERT(kSmiTag == 0); |
| 5636 __ test(length, Operand(length)); | 5633 __ test(length, Operand(length)); |
| 5637 __ j(not_zero, &compare_chars, Label::kNear); | 5634 __ j(not_zero, &compare_chars, Label::kNear); |
| 5638 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 5635 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
| 5639 __ ret(0); | 5636 __ ret(0); |
| 5640 | 5637 |
| 5641 // Compare characters. | 5638 // Compare characters. |
| 5642 __ bind(&compare_chars); | 5639 __ bind(&compare_chars); |
| 5643 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, | 5640 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, |
| 5644 &strings_not_equal); | 5641 &strings_not_equal, Label::kNear); |
| 5645 | 5642 |
| 5646 // Characters are equal. | 5643 // Characters are equal. |
| 5647 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 5644 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
| 5648 __ ret(0); | 5645 __ ret(0); |
| 5649 } | 5646 } |
| 5650 | 5647 |
| 5651 | 5648 |
| 5652 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, | 5649 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, |
| 5653 Register left, | 5650 Register left, |
| 5654 Register right, | 5651 Register right, |
| (...skipping 17 matching lines...) |
| 5672 __ bind(&left_shorter); | 5669 __ bind(&left_shorter); |
| 5673 | 5670 |
| 5674 Register min_length = scratch1; | 5671 Register min_length = scratch1; |
| 5675 | 5672 |
| 5676 // If either length is zero, just compare lengths. | 5673 // If either length is zero, just compare lengths. |
| 5677 Label compare_lengths; | 5674 Label compare_lengths; |
| 5678 __ test(min_length, Operand(min_length)); | 5675 __ test(min_length, Operand(min_length)); |
| 5679 __ j(zero, &compare_lengths, Label::kNear); | 5676 __ j(zero, &compare_lengths, Label::kNear); |
| 5680 | 5677 |
| 5681 // Compare characters. | 5678 // Compare characters. |
| 5682 NearLabel result_not_equal; | 5679 Label result_not_equal; |
| 5683 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, | 5680 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, |
| 5684 &result_not_equal); | 5681 &result_not_equal, Label::kNear); |
| 5685 | 5682 |
| 5686 // Compare lengths - strings up to min-length are equal. | 5683 // Compare lengths - strings up to min-length are equal. |
| 5687 __ bind(&compare_lengths); | 5684 __ bind(&compare_lengths); |
| 5688 __ test(length_delta, Operand(length_delta)); | 5685 __ test(length_delta, Operand(length_delta)); |
| 5689 __ j(not_zero, &result_not_equal); | 5686 __ j(not_zero, &result_not_equal, Label::kNear); |
| 5690 | 5687 |
| 5691 // Result is EQUAL. | 5688 // Result is EQUAL. |
| 5692 STATIC_ASSERT(EQUAL == 0); | 5689 STATIC_ASSERT(EQUAL == 0); |
| 5693 STATIC_ASSERT(kSmiTag == 0); | 5690 STATIC_ASSERT(kSmiTag == 0); |
| 5694 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 5691 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
| 5695 __ ret(0); | 5692 __ ret(0); |
| 5696 | 5693 |
| 5697 Label result_greater; | 5694 Label result_greater; |
| 5698 __ bind(&result_not_equal); | 5695 __ bind(&result_not_equal); |
| 5699 __ j(greater, &result_greater, Label::kNear); | 5696 __ j(greater, &result_greater, Label::kNear); |
| 5700 | 5697 |
| 5701 // Result is LESS. | 5698 // Result is LESS. |
| 5702 __ Set(eax, Immediate(Smi::FromInt(LESS))); | 5699 __ Set(eax, Immediate(Smi::FromInt(LESS))); |
| 5703 __ ret(0); | 5700 __ ret(0); |
| 5704 | 5701 |
| 5705 // Result is GREATER. | 5702 // Result is GREATER. |
| 5706 __ bind(&result_greater); | 5703 __ bind(&result_greater); |
| 5707 __ Set(eax, Immediate(Smi::FromInt(GREATER))); | 5704 __ Set(eax, Immediate(Smi::FromInt(GREATER))); |
| 5708 __ ret(0); | 5705 __ ret(0); |
| 5709 } | 5706 } |
| 5710 | 5707 |
| 5711 | 5708 |
| 5712 void StringCompareStub::GenerateAsciiCharsCompareLoop( | 5709 void StringCompareStub::GenerateAsciiCharsCompareLoop( |
| 5713 MacroAssembler* masm, | 5710 MacroAssembler* masm, |
| 5714 Register left, | 5711 Register left, |
| 5715 Register right, | 5712 Register right, |
| 5716 Register length, | 5713 Register length, |
| 5717 Register scratch, | 5714 Register scratch, |
| 5718 NearLabel* chars_not_equal) { | 5715 Label* chars_not_equal, |
| | 5716 Label::Distance chars_not_equal_near) { |
| 5719 // Change index to run from -length to -1 by adding length to string | 5717 // Change index to run from -length to -1 by adding length to string |
| 5720 // start. This means that loop ends when index reaches zero, which | 5718 // start. This means that loop ends when index reaches zero, which |
| 5721 // doesn't need an additional compare. | 5719 // doesn't need an additional compare. |
| 5722 __ SmiUntag(length); | 5720 __ SmiUntag(length); |
| 5723 __ lea(left, | 5721 __ lea(left, |
| 5724 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize)); | 5722 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize)); |
| 5725 __ lea(right, | 5723 __ lea(right, |
| 5726 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); | 5724 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); |
| 5727 __ neg(length); | 5725 __ neg(length); |
| 5728 Register index = length; // index = -length; | 5726 Register index = length; // index = -length; |
| 5729 | 5727 |
| 5730 // Compare loop. | 5728 // Compare loop. |
| 5731 Label loop; | 5729 Label loop; |
| 5732 __ bind(&loop); | 5730 __ bind(&loop); |
| 5733 __ mov_b(scratch, Operand(left, index, times_1, 0)); | 5731 __ mov_b(scratch, Operand(left, index, times_1, 0)); |
| 5734 __ cmpb(scratch, Operand(right, index, times_1, 0)); | 5732 __ cmpb(scratch, Operand(right, index, times_1, 0)); |
| 5735 __ j(not_equal, chars_not_equal); | 5733 __ j(not_equal, chars_not_equal, chars_not_equal_near); |
| 5736 __ add(Operand(index), Immediate(1)); | 5734 __ add(Operand(index), Immediate(1)); |
| 5737 __ j(not_zero, &loop); | 5735 __ j(not_zero, &loop); |
| 5738 } | 5736 } |
| 5739 | 5737 |
| 5740 | 5738 |
| 5741 void StringCompareStub::Generate(MacroAssembler* masm) { | 5739 void StringCompareStub::Generate(MacroAssembler* masm) { |
| 5742 Label runtime; | 5740 Label runtime; |
| 5743 | 5741 |
| 5744 // Stack frame on entry. | 5742 // Stack frame on entry. |
| 5745 // esp[0]: return address | 5743 // esp[0]: return address |
| (...skipping 497 matching lines...) |
| 6243 __ Drop(1); | 6241 __ Drop(1); |
| 6244 __ ret(2 * kPointerSize); | 6242 __ ret(2 * kPointerSize); |
| 6245 } | 6243 } |
| 6246 | 6244 |
| 6247 | 6245 |
| 6248 #undef __ | 6246 #undef __ |
| 6249 | 6247 |
| 6250 } } // namespace v8::internal | 6248 } } // namespace v8::internal |
| 6251 | 6249 |
| 6252 #endif // V8_TARGET_ARCH_IA32 | 6250 #endif // V8_TARGET_ARCH_IA32 |