OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/mips64/codegen-mips64.h" | 5 #include "src/mips64/codegen-mips64.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include <memory> | 9 #include <memory> |
10 | 10 |
(...skipping 589 matching lines...)
600 DCHECK(masm->has_frame()); | 600 DCHECK(masm->has_frame()); |
601 masm->set_has_frame(false); | 601 masm->set_has_frame(false); |
602 } | 602 } |
603 | 603 |
604 | 604 |
605 // ------------------------------------------------------------------------- | 605 // ------------------------------------------------------------------------- |
606 // Code generators | 606 // Code generators |
607 | 607 |
608 #define __ ACCESS_MASM(masm) | 608 #define __ ACCESS_MASM(masm) |
609 | 609 |
610 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | |
611 MacroAssembler* masm, | |
612 Register receiver, | |
613 Register key, | |
614 Register value, | |
615 Register target_map, | |
616 AllocationSiteMode mode, | |
617 Label* allocation_memento_found) { | |
618 Register scratch_elements = a4; | |
619 DCHECK(!AreAliased(receiver, key, value, target_map, | |
620 scratch_elements)); | |
621 | |
622 if (mode == TRACK_ALLOCATION_SITE) { | |
623 __ JumpIfJSArrayHasAllocationMemento( | |
624 receiver, scratch_elements, allocation_memento_found); | |
625 } | |
626 | |
627 // Set transitioned map. | |
628 __ sd(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
629 __ RecordWriteField(receiver, | |
630 HeapObject::kMapOffset, | |
631 target_map, | |
632 t1, | |
633 kRAHasNotBeenSaved, | |
634 kDontSaveFPRegs, | |
635 EMIT_REMEMBERED_SET, | |
636 OMIT_SMI_CHECK); | |
637 } | |
638 | |
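For readers outside the MIPS port: the generator above handles the map-only case (e.g. FAST_SMI_ELEMENTS to FAST_ELEMENTS), where the backing store layout is unchanged and only the receiver's map word is rewritten, followed by a write barrier. A minimal, self-contained C++ sketch of that behaviour is below; the struct layout and the RecordWrite stand-in are illustrative assumptions, not V8's actual runtime API.

    struct Map { int elements_kind; };
    struct FixedArrayBase {};
    struct JSObject {
      Map* map;                    // HeapObject::kMapOffset
      FixedArrayBase* elements;    // JSObject::kElementsOffset (unchanged here)
    };

    // Hypothetical stand-in; the real stub calls RecordWriteField with
    // EMIT_REMEMBERED_SET so the remembered set stays up to date.
    inline void RecordWrite(JSObject* host, Map* value) { (void)host; (void)value; }

    void MapChangeElementsTransition(JSObject* receiver, Map* target_map) {
      receiver->map = target_map;        // sd target_map, [receiver + kMapOffset]
      RecordWrite(receiver, target_map);
    }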
639 | |
640 void ElementsTransitionGenerator::GenerateSmiToDouble( | |
641 MacroAssembler* masm, | |
642 Register receiver, | |
643 Register key, | |
644 Register value, | |
645 Register target_map, | |
646 AllocationSiteMode mode, | |
647 Label* fail) { | |
648 // Register ra contains the return address. | |
649 Label loop, entry, convert_hole, gc_required, only_change_map, done; | |
650 Register elements = a4; | |
651 Register length = a5; | |
652 Register array = a6; | |
653 Register array_end = array; | |
654 | |
655 // target_map parameter can be clobbered. | |
656 Register scratch1 = target_map; | |
657 Register scratch2 = t1; | |
658 Register scratch3 = a7; | |
659 | |
660 // Verify input registers don't conflict with locals. | |
661 DCHECK(!AreAliased(receiver, key, value, target_map, | |
662 elements, length, array, scratch2)); | |
663 | |
664 Register scratch = t2; | |
665 if (mode == TRACK_ALLOCATION_SITE) { | |
666 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail); | |
667 } | |
668 | |
669 // Check for empty arrays, which only require a map transition and no changes | |
670 // to the backing store. | |
671 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
672 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | |
673 __ Branch(&only_change_map, eq, at, Operand(elements)); | |
674 | |
675 __ push(ra); | |
676 __ ld(length, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
677 // elements: source FixedArray | |
678 // length: number of elements (smi-tagged) | |
679 | |
680 // Allocate new FixedDoubleArray. | |
681 __ SmiScale(scratch, length, kDoubleSizeLog2); | |
682 __ Daddu(scratch, scratch, FixedDoubleArray::kHeaderSize); | |
683 __ Allocate(scratch, array, t3, scratch2, &gc_required, DOUBLE_ALIGNMENT); | |
684 __ Dsubu(array, array, kHeapObjectTag); | |
685 // array: destination FixedDoubleArray, not tagged as heap object | |
686 | |
687 // Set destination FixedDoubleArray's length and map. | |
688 __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex); | |
689 __ sd(length, MemOperand(array, FixedDoubleArray::kLengthOffset)); | |
690 // Set the new FixedDoubleArray's map; the receiver's map is updated below. | |
691 __ sd(scratch2, MemOperand(array, HeapObject::kMapOffset)); | |
692 | |
693 __ sd(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
694 __ RecordWriteField(receiver, | |
695 HeapObject::kMapOffset, | |
696 target_map, | |
697 scratch2, | |
698 kRAHasBeenSaved, | |
699 kDontSaveFPRegs, | |
700 OMIT_REMEMBERED_SET, | |
701 OMIT_SMI_CHECK); | |
702 // Replace receiver's backing store with newly created FixedDoubleArray. | |
703 __ Daddu(scratch1, array, Operand(kHeapObjectTag)); | |
704 __ sd(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
705 __ RecordWriteField(receiver, | |
706 JSObject::kElementsOffset, | |
707 scratch1, | |
708 scratch2, | |
709 kRAHasBeenSaved, | |
710 kDontSaveFPRegs, | |
711 EMIT_REMEMBERED_SET, | |
712 OMIT_SMI_CHECK); | |
713 | |
714 | |
715 // Prepare for conversion loop. | |
716 __ Daddu(scratch1, elements, | |
717 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
718 __ Daddu(scratch3, array, Operand(FixedDoubleArray::kHeaderSize)); | |
719 __ SmiScale(array_end, length, kDoubleSizeLog2); | |
720 __ Daddu(array_end, array_end, scratch3); | |
721 | |
722 // Repurpose registers no longer in use. | |
723 Register hole_lower = elements; | |
724 Register hole_upper = length; | |
725 __ li(hole_lower, Operand(kHoleNanLower32)); | |
726 __ li(hole_upper, Operand(kHoleNanUpper32)); | |
727 | |
728 // scratch1: begin of source FixedArray element fields, not tagged | |
729 // hole_lower: kHoleNanLower32 | |
730 // hole_upper: kHoleNanUpper32 | |
731 // array_end: end of destination FixedDoubleArray, not tagged | |
732 // scratch3: begin of FixedDoubleArray element fields, not tagged | |
733 | |
734 __ Branch(&entry); | |
735 | |
736 __ bind(&only_change_map); | |
737 __ sd(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
738 __ RecordWriteField(receiver, | |
739 HeapObject::kMapOffset, | |
740 target_map, | |
741 scratch2, | |
742 kRAHasBeenSaved, | |
743 kDontSaveFPRegs, | |
744 OMIT_REMEMBERED_SET, | |
745 OMIT_SMI_CHECK); | |
746 __ Branch(&done); | |
747 | |
748 // Call into runtime if GC is required. | |
749 __ bind(&gc_required); | |
750 __ ld(ra, MemOperand(sp, 0)); | |
751 __ Branch(USE_DELAY_SLOT, fail); | |
752 __ daddiu(sp, sp, kPointerSize); // In delay slot. | |
753 | |
754 // Convert and copy elements. | |
755 __ bind(&loop); | |
756 __ ld(scratch2, MemOperand(scratch1)); | |
757 __ Daddu(scratch1, scratch1, kPointerSize); | |
758 // scratch2: current element | |
759 __ JumpIfNotSmi(scratch2, &convert_hole); | |
760 __ SmiUntag(scratch2); | |
761 | |
762 // Normal smi, convert to double and store. | |
763 __ mtc1(scratch2, f0); | |
764 __ cvt_d_w(f0, f0); | |
765 __ sdc1(f0, MemOperand(scratch3)); | |
766 __ Branch(USE_DELAY_SLOT, &entry); | |
767 __ daddiu(scratch3, scratch3, kDoubleSize); // In delay slot. | |
768 | |
769 // Hole found, store the-hole NaN. | |
770 __ bind(&convert_hole); | |
771 if (FLAG_debug_code) { | |
772 // scratch2 still holds the tagged heap object; ensure its tag bit is set. | |
773 __ Or(scratch2, scratch2, Operand(1)); | |
774 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
775 __ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(scratch2)); | |
776 } | |
777 // mantissa | |
778 __ sw(hole_lower, MemOperand(scratch3, Register::kMantissaOffset)); | |
779 // exponent | |
780 __ sw(hole_upper, MemOperand(scratch3, Register::kExponentOffset)); | |
781 __ Daddu(scratch3, scratch3, kDoubleSize); | |
782 | |
783 __ bind(&entry); | |
784 __ Branch(&loop, lt, scratch3, Operand(array_end)); | |
785 | |
786 __ bind(&done); | |
787 __ pop(ra); | |
788 } | |
789 | |
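The core of the loop in GenerateSmiToDouble is: load a tagged 64-bit slot, untag the smi (on MIPS64 the payload sits in the upper 32 bits, so untagging is an arithmetic shift right by 32), convert it to a double, and store 8 bytes. A non-smi slot in a smi-only array can only be the hole, which is written out as the canonical hole NaN (kHoleNanLower32/kHoleNanUpper32). A self-contained C++ sketch of one iteration, assuming a little-endian layout and placeholder hole bit patterns:

    #include <cstdint>
    #include <cstring>

    // Placeholders; V8 defines the real halves as kHoleNanLower32/kHoleNanUpper32.
    constexpr uint32_t kAssumedHoleNanLower32 = 0xFFF7FFFF;
    constexpr uint32_t kAssumedHoleNanUpper32 = 0xFFF7FFFF;

    void ConvertSmiSlotToDouble(uint64_t tagged_slot, uint8_t* dst) {
      if ((tagged_slot & 1) == 0) {  // smi tag (bit 0) is clear
        // SmiUntag: the smi value lives in the upper 32 bits on 64-bit targets.
        int32_t value = static_cast<int32_t>(static_cast<int64_t>(tagged_slot) >> 32);
        double d = static_cast<double>(value);           // mtc1 + cvt_d_w in the stub
        std::memcpy(dst, &d, sizeof(d));                 // sdc1
      } else {
        // Non-smi in a smi-only array: must be the hole; store the hole NaN.
        std::memcpy(dst + 0, &kAssumedHoleNanLower32, 4);  // kMantissaOffset
        std::memcpy(dst + 4, &kAssumedHoleNanUpper32, 4);  // kExponentOffset
      }
    }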
790 | |
791 void ElementsTransitionGenerator::GenerateDoubleToObject( | |
792 MacroAssembler* masm, | |
793 Register receiver, | |
794 Register key, | |
795 Register value, | |
796 Register target_map, | |
797 AllocationSiteMode mode, | |
798 Label* fail) { | |
799 // Register ra contains the return address. | |
800 Label entry, loop, convert_hole, gc_required, only_change_map; | |
801 Register elements = a4; | |
802 Register array = a6; | |
803 Register length = a5; | |
804 Register scratch = t1; | |
805 | |
806 // Verify input registers don't conflict with locals. | |
807 DCHECK(!AreAliased(receiver, key, value, target_map, | |
808 elements, array, length, scratch)); | |
809 if (mode == TRACK_ALLOCATION_SITE) { | |
810 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail); | |
811 } | |
812 | |
813 // Check for empty arrays, which only require a map transition and no changes | |
814 // to the backing store. | |
815 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
816 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | |
817 __ Branch(&only_change_map, eq, at, Operand(elements)); | |
818 | |
819 __ MultiPush( | |
820 value.bit() | key.bit() | receiver.bit() | target_map.bit() | ra.bit()); | |
821 | |
822 __ ld(length, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
823 // elements: source FixedDoubleArray | |
824 // length: number of elements (smi-tagged) | |
825 | |
826 // Allocate new FixedArray. | |
827 // Re-use value and target_map registers, as they have been saved on the | |
828 // stack. | |
829 Register array_size = value; | |
830 Register allocate_scratch = target_map; | |
831 __ SmiScale(array_size, length, kPointerSizeLog2); | |
832 __ Daddu(array_size, array_size, FixedDoubleArray::kHeaderSize); | |
833 __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required, | |
834 NO_ALLOCATION_FLAGS); | |
835 __ Dsubu(array, array, kHeapObjectTag); | |
836 // array: destination FixedArray, not tagged as heap object | |
837 // Set destination FixedArray's length and map. | |
838 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex); | |
839 __ sd(length, MemOperand(array, FixedDoubleArray::kLengthOffset)); | |
840 __ sd(scratch, MemOperand(array, HeapObject::kMapOffset)); | |
841 | |
842 // Prepare for conversion loop. | |
843 Register src_elements = elements; | |
844 Register dst_elements = target_map; | |
845 Register dst_end = length; | |
846 Register heap_number_map = scratch; | |
847 __ Daddu(src_elements, src_elements, | |
848 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); | |
849 __ Daddu(dst_elements, array, Operand(FixedArray::kHeaderSize)); | |
850 __ SmiScale(dst_end, dst_end, kPointerSizeLog2); | |
851 __ Daddu(dst_end, dst_elements, dst_end); | |
852 | |
853 // Allocating heap numbers in the loop below can fail and cause a jump to | |
854 // gc_required. We can't leave a partly initialized FixedArray behind, | |
855 // so pessimistically fill it with holes now. | |
856 Label initialization_loop, initialization_loop_entry; | |
857 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | |
858 __ Branch(&initialization_loop_entry); | |
859 __ bind(&initialization_loop); | |
860 __ sd(scratch, MemOperand(dst_elements)); | |
861 __ Daddu(dst_elements, dst_elements, Operand(kPointerSize)); | |
862 __ bind(&initialization_loop_entry); | |
863 __ Branch(&initialization_loop, lt, dst_elements, Operand(dst_end)); | |
864 | |
865 __ Daddu(dst_elements, array, Operand(FixedArray::kHeaderSize)); | |
866 __ Daddu(array, array, Operand(kHeapObjectTag)); | |
867 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
868 // The element addresses below are raw (untagged) addresses. | |
869 // dst_elements: begin of destination FixedArray element fields, not tagged | |
870 // src_elements: begin of source FixedDoubleArray element fields, not tagged | |
871 // (the exponent word is loaded at Register::kExponentOffset) | |
872 // dst_end: end of destination FixedArray, not tagged | |
873 // array: destination FixedArray | |
874 // heap_number_map: heap number map | |
875 __ Branch(&entry); | |
876 | |
877 // Call into runtime if GC is required. | |
878 __ bind(&gc_required); | |
879 __ MultiPop( | |
880 value.bit() | key.bit() | receiver.bit() | target_map.bit() | ra.bit()); | |
881 | |
882 __ Branch(fail); | |
883 | |
884 __ bind(&loop); | |
885 Register upper_bits = key; | |
886 __ lw(upper_bits, MemOperand(src_elements, Register::kExponentOffset)); | |
887 __ Daddu(src_elements, src_elements, kDoubleSize); | |
888 // upper_bits: current element's upper 32 bits | |
889 // src_elements: address of next element | |
890 __ Branch(&convert_hole, eq, upper_bits, Operand(kHoleNanUpper32)); | |
891 | |
892 // Non-hole double, copy value into a heap number. | |
893 Register heap_number = receiver; | |
894 Register scratch2 = value; | |
895 Register scratch3 = t2; | |
896 __ AllocateHeapNumber(heap_number, scratch2, scratch3, heap_number_map, | |
897 &gc_required); | |
898 // heap_number: new heap number | |
899 // Load current element; src_elements points to the next element. | |
900 | |
901 __ ld(scratch2, MemOperand(src_elements, -kDoubleSize)); | |
902 __ sd(scratch2, FieldMemOperand(heap_number, HeapNumber::kValueOffset)); | |
903 | |
904 __ mov(scratch2, dst_elements); | |
905 __ sd(heap_number, MemOperand(dst_elements)); | |
906 __ Daddu(dst_elements, dst_elements, kPointerSize); | |
907 __ RecordWrite(array, | |
908 scratch2, | |
909 heap_number, | |
910 kRAHasBeenSaved, | |
911 kDontSaveFPRegs, | |
912 EMIT_REMEMBERED_SET, | |
913 OMIT_SMI_CHECK); | |
914 __ Branch(&entry); | |
915 | |
916 // Replace the-hole NaN with the-hole pointer. | |
917 __ bind(&convert_hole); | |
918 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); | |
919 __ sd(scratch2, MemOperand(dst_elements)); | |
920 __ Daddu(dst_elements, dst_elements, kPointerSize); | |
921 | |
922 __ bind(&entry); | |
923 __ Branch(&loop, lt, dst_elements, Operand(dst_end)); | |
924 | |
925 __ MultiPop(receiver.bit() | target_map.bit() | value.bit() | key.bit()); | |
926 // Replace receiver's backing store with newly created and filled FixedArray. | |
927 __ sd(array, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
928 __ RecordWriteField(receiver, | |
929 JSObject::kElementsOffset, | |
930 array, | |
931 scratch, | |
932 kRAHasBeenSaved, | |
933 kDontSaveFPRegs, | |
934 EMIT_REMEMBERED_SET, | |
935 OMIT_SMI_CHECK); | |
936 __ pop(ra); | |
937 | |
938 __ bind(&only_change_map); | |
939 // Update receiver's map. | |
940 __ sd(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
941 __ RecordWriteField(receiver, | |
942 HeapObject::kMapOffset, | |
943 target_map, | |
944 scratch, | |
945 kRAHasNotBeenSaved, | |
946 kDontSaveFPRegs, | |
947 OMIT_REMEMBERED_SET, | |
948 OMIT_SMI_CHECK); | |
949 } | |
950 | |
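GenerateDoubleToObject goes the other way: for each 8-byte slot it inspects the upper 32 bits; the hole NaN pattern becomes the the-hole sentinel pointer, and anything else is boxed into a freshly allocated HeapNumber (which is why the loop can hit gc_required and why the new FixedArray is pre-filled with holes). A self-contained C++ sketch of one iteration, with stand-in types and an assumed hole bit pattern rather than V8's actual runtime API:

    #include <cstdint>
    #include <cstring>

    constexpr uint32_t kAssumedHoleNanUpper32 = 0xFFF7FFFF;  // placeholder value

    struct Object {};                       // stand-in for a tagged heap object
    static Object the_hole_sentinel;
    Object* TheHole() { return &the_hole_sentinel; }
    Object* AllocateHeapNumber(double) { return new Object(); }  // may GC in real V8

    Object* ConvertDoubleSlotToObject(const uint8_t* src) {
      uint32_t upper;
      std::memcpy(&upper, src + 4, sizeof(upper));   // kExponentOffset, little-endian
      if (upper == kAssumedHoleNanUpper32) {
        return TheHole();                            // the-hole NaN -> the-hole pointer
      }
      double value;
      std::memcpy(&value, src, sizeof(value));
      return AllocateHeapNumber(value);              // caller records a write barrier
    }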
951 | |
952 void StringCharLoadGenerator::Generate(MacroAssembler* masm, | 610 void StringCharLoadGenerator::Generate(MacroAssembler* masm, |
953 Register string, | 611 Register string, |
954 Register index, | 612 Register index, |
955 Register result, | 613 Register result, |
956 Label* call_runtime) { | 614 Label* call_runtime) { |
957 // Fetch the instance type of the receiver into result register. | 615 // Fetch the instance type of the receiver into result register. |
958 __ ld(result, FieldMemOperand(string, HeapObject::kMapOffset)); | 616 __ ld(result, FieldMemOperand(string, HeapObject::kMapOffset)); |
959 __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); | 617 __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); |
960 | 618 |
961 // We need special handling for indirect strings. | 619 // We need special handling for indirect strings. |
(...skipping 157 matching lines...)
1119 } | 777 } |
1120 } | 778 } |
1121 | 779 |
1122 | 780 |
1123 #undef __ | 781 #undef __ |
1124 | 782 |
1125 } // namespace internal | 783 } // namespace internal |
1126 } // namespace v8 | 784 } // namespace v8 |
1127 | 785 |
1128 #endif // V8_TARGET_ARCH_MIPS64 | 786 #endif // V8_TARGET_ARCH_MIPS64 |