OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 611 matching lines...) |
622 __ cmp(R3, Operand(0)); | 622 __ cmp(R3, Operand(0)); |
623 __ b(&slow_case, LT); | 623 __ b(&slow_case, LT); |
624 | 624 |
625 // Check for maximum allowed length. | 625 // Check for maximum allowed length. |
626 const intptr_t max_len = | 626 const intptr_t max_len = |
627 reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements)); | 627 reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements)); |
628 __ CompareImmediate(R3, max_len); | 628 __ CompareImmediate(R3, max_len); |
629 __ b(&slow_case, GT); | 629 __ b(&slow_case, GT); |
630 | 630 |
631 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 631 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; |
632 __ LoadImmediate(R8, fixed_size); | 632 __ LoadImmediate(R9, fixed_size); |
633 __ add(R8, R8, Operand(R3, LSL, 1)); // R3 is a Smi. | 633 __ add(R9, R9, Operand(R3, LSL, 1)); // R3 is a Smi. |
634 ASSERT(kSmiTagShift == 1); | 634 ASSERT(kSmiTagShift == 1); |
635 __ bic(R8, R8, Operand(kObjectAlignment - 1)); | 635 __ bic(R9, R9, Operand(kObjectAlignment - 1)); |
636 | 636 |
637 // R8: Allocation size. | 637 // R9: Allocation size. |
638 | 638 |
639 Isolate* isolate = Isolate::Current(); | 639 Isolate* isolate = Isolate::Current(); |
640 Heap* heap = isolate->heap(); | 640 Heap* heap = isolate->heap(); |
641 const intptr_t cid = kArrayCid; | 641 const intptr_t cid = kArrayCid; |
642 Heap::Space space = heap->SpaceForAllocation(cid); | 642 Heap::Space space = heap->SpaceForAllocation(cid); |
643 __ LoadImmediate(R6, heap->TopAddress(space)); | 643 __ LoadImmediate(R6, heap->TopAddress(space)); |
644 __ ldr(R0, Address(R6, 0)); // Potential new object start. | 644 __ ldr(R0, Address(R6, 0)); // Potential new object start. |
645 __ adds(R7, R0, Operand(R8)); // Potential next object start. | 645 __ adds(R7, R0, Operand(R9)); // Potential next object start. |
646 __ b(&slow_case, CS); // Branch if unsigned overflow. | 646 __ b(&slow_case, CS); // Branch if unsigned overflow. |
647 | 647 |
648 // Check if the allocation fits into the remaining space. | 648 // Check if the allocation fits into the remaining space. |
649 // R0: potential new object start. | 649 // R0: potential new object start. |
650 // R7: potential next object start. | 650 // R7: potential next object start. |
651 // R8: allocation size. | 651 // R9: allocation size. |
652 __ LoadImmediate(R3, heap->EndAddress(space)); | 652 __ LoadImmediate(R3, heap->EndAddress(space)); |
653 __ ldr(R3, Address(R3, 0)); | 653 __ ldr(R3, Address(R3, 0)); |
654 __ cmp(R7, Operand(R3)); | 654 __ cmp(R7, Operand(R3)); |
655 __ b(&slow_case, CS); | 655 __ b(&slow_case, CS); |
656 | 656 |
657 // Successfully allocated the object(s), now update top to point to | 657 // Successfully allocated the object(s), now update top to point to |
658 // next object start and initialize the object. | 658 // next object start and initialize the object. |
659 __ LoadAllocationStatsAddress(R3, cid, space); | 659 __ LoadAllocationStatsAddress(R3, cid, space); |
660 __ str(R7, Address(R6, 0)); | 660 __ str(R7, Address(R6, 0)); |
661 __ add(R0, R0, Operand(kHeapObjectTag)); | 661 __ add(R0, R0, Operand(kHeapObjectTag)); |
662 | 662 |
663 // Initialize the tags. | 663 // Initialize the tags. |
664 // R0: new object start as a tagged pointer. | 664 // R0: new object start as a tagged pointer. |
665 // R3: allocation stats address. | 665 // R3: allocation stats address. |
666 // R7: new object end address. | 666 // R7: new object end address. |
667 // R8: allocation size. | 667 // R9: allocation size. |
668 { | 668 { |
669 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; | 669 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; |
670 | 670 |
671 __ CompareImmediate(R8, RawObject::SizeTag::kMaxSizeTag); | 671 __ CompareImmediate(R9, RawObject::SizeTag::kMaxSizeTag); |
672 __ mov(R6, Operand(R8, LSL, shift), LS); | 672 __ mov(R6, Operand(R9, LSL, shift), LS); |
673 __ mov(R6, Operand(0), HI); | 673 __ mov(R6, Operand(0), HI); |
674 | 674 |
675 // Get the class index and insert it into the tags. | 675 // Get the class index and insert it into the tags. |
676 // R6: size and bit tags. | 676 // R6: size and bit tags. |
677 __ LoadImmediate(TMP, RawObject::ClassIdTag::encode(cid)); | 677 __ LoadImmediate(TMP, RawObject::ClassIdTag::encode(cid)); |
678 __ orr(R6, R6, Operand(TMP)); | 678 __ orr(R6, R6, Operand(TMP)); |
679 __ str(R6, FieldAddress(R0, Array::tags_offset())); // Store tags. | 679 __ str(R6, FieldAddress(R0, Array::tags_offset())); // Store tags. |
680 } | 680 } |
681 | 681 |
682 // R0: new object start as a tagged pointer. | 682 // R0: new object start as a tagged pointer. |
683 // R7: new object end address. | 683 // R7: new object end address. |
684 // Store the type argument field. | 684 // Store the type argument field. |
685 __ InitializeFieldNoBarrier(R0, | 685 __ InitializeFieldNoBarrier(R0, |
686 FieldAddress(R0, Array::type_arguments_offset()), | 686 FieldAddress(R0, Array::type_arguments_offset()), |
687 R1); | 687 R1); |
688 | 688 |
689 // Set the length field. | 689 // Set the length field. |
690 __ InitializeFieldNoBarrier(R0, | 690 __ InitializeFieldNoBarrier(R0, |
691 FieldAddress(R0, Array::length_offset()), | 691 FieldAddress(R0, Array::length_offset()), |
692 R2); | 692 R2); |
693 | 693 |
694 // Initialize all array elements to raw_null. | 694 // Initialize all array elements to raw_null. |
695 // R0: new object start as a tagged pointer. | 695 // R0: new object start as a tagged pointer. |
696 // R3: allocation stats address. | 696 // R3: allocation stats address. |
697 // R4, R5: null | 697 // R4, R5: null |
698 // R6: iterator which initially points to the start of the variable | 698 // R6: iterator which initially points to the start of the variable |
699 // data area to be initialized. | 699 // data area to be initialized. |
700 // R7: new object end address. | 700 // R7: new object end address. |
701 // R8: allocation size. | 701 // R9: allocation size. |
702 | 702 |
703 __ LoadImmediate(R4, reinterpret_cast<intptr_t>(Object::null())); | 703 __ LoadImmediate(R4, reinterpret_cast<intptr_t>(Object::null())); |
704 __ mov(R5, Operand(R4)); | 704 __ mov(R5, Operand(R4)); |
705 __ AddImmediate(R6, R0, sizeof(RawArray) - kHeapObjectTag); | 705 __ AddImmediate(R6, R0, sizeof(RawArray) - kHeapObjectTag); |
706 __ InitializeFieldsNoBarrier(R0, R6, R7, R4, R5); | 706 __ InitializeFieldsNoBarrier(R0, R6, R7, R4, R5); |
707 __ IncrementAllocationStatsWithSize(R3, R8, cid, space); | 707 __ IncrementAllocationStatsWithSize(R3, R9, cid, space); |
708 __ Ret(); // Returns the newly allocated object in R0. | 708 __ Ret(); // Returns the newly allocated object in R0. |
709 // Unable to allocate the array using the fast inline code, just call | 709 // Unable to allocate the array using the fast inline code, just call |
710 // into the runtime. | 710 // into the runtime. |
711 __ Bind(&slow_case); | 711 __ Bind(&slow_case); |
712 | 712 |
713 // Create a stub frame as we are pushing some objects on the stack before | 713 // Create a stub frame as we are pushing some objects on the stack before |
714 // calling into the runtime. | 714 // calling into the runtime. |
715 __ EnterStubFrame(); | 715 __ EnterStubFrame(); |
716 __ LoadImmediate(IP, reinterpret_cast<intptr_t>(Object::null())); | 716 __ LoadImmediate(IP, reinterpret_cast<intptr_t>(Object::null())); |
717 // Setup space on stack for return value. | 717 // Setup space on stack for return value. |
(...skipping 10 matching lines...) |
728 __ BranchPatchable(&stub_code->FixAllocateArrayStubTargetLabel()); | 728 __ BranchPatchable(&stub_code->FixAllocateArrayStubTargetLabel()); |
729 } | 729 } |
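
For reference, the allocation size that the fast path above accumulates in R9 (R8 in the old version) corresponds to the plain C++ calculation below. This is a minimal sketch under assumed 32-bit constants; kRawArrayHeaderSize is an illustrative stand-in for sizeof(RawArray), not a real VM symbol.

#include <stdint.h>

typedef intptr_t word;

static const word kWordSize = 4;             // 32-bit ARM word size (assumed)
static const word kObjectAlignment = 8;      // assumed double-word alignment
static const word kRawArrayHeaderSize = 12;  // stand-in for sizeof(RawArray)

// The length arrives in R3 as a Smi (value << 1), so "Operand(R3, LSL, 1)"
// contributes length * kWordSize to the running total in R9.
word AllocationSizeForArray(word length) {
  word size = kRawArrayHeaderSize + kObjectAlignment - 1 + length * kWordSize;
  return size & ~(kObjectAlignment - 1);  // the bic rounds down to alignment
}
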
730 | 730 |
731 | 731 |
732 // Called when invoking Dart code from C++ (VM code). | 732 // Called when invoking Dart code from C++ (VM code). |
733 // Input parameters: | 733 // Input parameters: |
734 // LR : points to return address. | 734 // LR : points to return address. |
735 // R0 : entrypoint of the Dart function to call. | 735 // R0 : entrypoint of the Dart function to call. |
736 // R1 : arguments descriptor array. | 736 // R1 : arguments descriptor array. |
737 // R2 : arguments array. | 737 // R2 : arguments array. |
738 // R3 : new context containing the current isolate pointer. | 738 // R3 : current thread. |
739 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 739 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
740 // Save frame pointer coming in. | 740 // Save frame pointer coming in. |
741 __ EnterFrame((1 << FP) | (1 << LR), 0); | 741 __ EnterFrame((1 << FP) | (1 << LR), 0); |
742 | 742 |
743 // Save new context and C++ ABI callee-saved registers. | 743 // Save new context and C++ ABI callee-saved registers. |
744 __ PushList(kAbiPreservedCpuRegs); | 744 __ PushList(kAbiPreservedCpuRegs); |
745 | 745 |
746 const DRegister firstd = EvenDRegisterOf(kAbiFirstPreservedFpuReg); | 746 const DRegister firstd = EvenDRegisterOf(kAbiFirstPreservedFpuReg); |
747 if (TargetCPUFeatures::vfp_supported()) { | 747 if (TargetCPUFeatures::vfp_supported()) { |
748 ASSERT(2 * kAbiPreservedFpuRegCount < 16); | 748 ASSERT(2 * kAbiPreservedFpuRegCount < 16); |
749 // Save FPU registers. 2 D registers per Q register. | 749 // Save FPU registers. 2 D registers per Q register. |
750 __ vstmd(DB_W, SP, firstd, 2 * kAbiPreservedFpuRegCount); | 750 __ vstmd(DB_W, SP, firstd, 2 * kAbiPreservedFpuRegCount); |
751 } else { | 751 } else { |
752 __ sub(SP, SP, Operand(kAbiPreservedFpuRegCount * kFpuRegisterSize)); | 752 __ sub(SP, SP, Operand(kAbiPreservedFpuRegCount * kFpuRegisterSize)); |
753 } | 753 } |
754 | 754 |
755 // We now load the pool pointer(PP) as we are about to invoke dart code and we | 755 // We now load the pool pointer(PP) as we are about to invoke dart code and we |
756 // could potentially invoke some intrinsic functions which need the PP to be | 756 // could potentially invoke some intrinsic functions which need the PP to be |
757 // set up. | 757 // set up. |
758 __ LoadPoolPointer(); | 758 __ LoadPoolPointer(); |
759 | 759 |
760 __ LoadIsolate(R8); | 760 // Set up THR, which caches the current thread in Dart code. |
| 761 if (THR != R3) { |
| 762 __ mov(THR, Operand(R3)); |
| 763 } |
| 764 __ LoadIsolate(R9); |
761 | 765 |
762 // Save the current VMTag on the stack. | 766 // Save the current VMTag on the stack. |
763 __ LoadFromOffset(kWord, R5, R8, Isolate::vm_tag_offset()); | 767 __ LoadFromOffset(kWord, R5, R9, Isolate::vm_tag_offset()); |
764 __ Push(R5); | 768 __ Push(R5); |
765 | 769 |
766 // Mark that the isolate is executing Dart code. | 770 // Mark that the isolate is executing Dart code. |
767 __ LoadImmediate(R5, VMTag::kDartTagId); | 771 __ LoadImmediate(R5, VMTag::kDartTagId); |
768 __ StoreToOffset(kWord, R5, R8, Isolate::vm_tag_offset()); | 772 __ StoreToOffset(kWord, R5, R9, Isolate::vm_tag_offset()); |
769 | 773 |
770 // Save top resource and top exit frame info. Use R4-6 as temporary registers. | 774 // Save top resource and top exit frame info. Use R4-6 as temporary registers. |
771 // StackFrameIterator reads the top exit frame info saved in this frame. | 775 // StackFrameIterator reads the top exit frame info saved in this frame. |
772 __ LoadFromOffset(kWord, R5, R8, Isolate::top_exit_frame_info_offset()); | 776 __ LoadFromOffset(kWord, R5, R9, Isolate::top_exit_frame_info_offset()); |
773 __ LoadFromOffset(kWord, R4, R8, Isolate::top_resource_offset()); | 777 __ LoadFromOffset(kWord, R4, R9, Isolate::top_resource_offset()); |
774 __ LoadImmediate(R6, 0); | 778 __ LoadImmediate(R6, 0); |
775 __ StoreToOffset(kWord, R6, R8, Isolate::top_resource_offset()); | 779 __ StoreToOffset(kWord, R6, R9, Isolate::top_resource_offset()); |
776 __ StoreToOffset(kWord, R6, R8, Isolate::top_exit_frame_info_offset()); | 780 __ StoreToOffset(kWord, R6, R9, Isolate::top_exit_frame_info_offset()); |
777 | 781 |
778 // kExitLinkSlotFromEntryFp must be kept in sync with the code below. | 782 // kExitLinkSlotFromEntryFp must be kept in sync with the code below. |
779 __ Push(R4); | 783 __ Push(R4); |
780 ASSERT(kExitLinkSlotFromEntryFp == -26); | 784 ASSERT(kExitLinkSlotFromEntryFp == -26); |
781 __ Push(R5); | 785 __ Push(R5); |
782 | 786 |
783 // Load arguments descriptor array into R4, which is passed to Dart code. | 787 // Load arguments descriptor array into R4, which is passed to Dart code. |
784 __ ldr(R4, Address(R1, VMHandles::kOffsetOfRawPtrInHandle)); | 788 __ ldr(R4, Address(R1, VMHandles::kOffsetOfRawPtrInHandle)); |
785 | 789 |
786 // Load number of arguments into R5. | 790 // Load number of arguments into R5. |
(...skipping 18 matching lines...) |
805 __ cmp(R1, Operand(R5)); | 809 __ cmp(R1, Operand(R5)); |
806 __ b(&push_arguments, LT); | 810 __ b(&push_arguments, LT); |
807 __ Bind(&done_push_arguments); | 811 __ Bind(&done_push_arguments); |
808 | 812 |
809 // Call the Dart code entrypoint. | 813 // Call the Dart code entrypoint. |
810 __ blx(R0); // R4 is the arguments descriptor array. | 814 __ blx(R0); // R4 is the arguments descriptor array. |
811 | 815 |
812 // Get rid of arguments pushed on the stack. | 816 // Get rid of arguments pushed on the stack. |
813 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); | 817 __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize); |
814 | 818 |
815 __ LoadIsolate(R8); | 819 __ LoadIsolate(R9); |
816 // Restore the saved top exit frame info and top resource back into the | 820 // Restore the saved top exit frame info and top resource back into the |
817 // Isolate structure. Uses R5 as a temporary register for this. | 821 // Isolate structure. Uses R5 as a temporary register for this. |
818 __ Pop(R5); | 822 __ Pop(R5); |
819 __ StoreToOffset(kWord, R5, R8, Isolate::top_exit_frame_info_offset()); | 823 __ StoreToOffset(kWord, R5, R9, Isolate::top_exit_frame_info_offset()); |
820 __ Pop(R5); | 824 __ Pop(R5); |
821 __ StoreToOffset(kWord, R5, R8, Isolate::top_resource_offset()); | 825 __ StoreToOffset(kWord, R5, R9, Isolate::top_resource_offset()); |
822 | 826 |
823 // Restore the current VMTag from the stack. | 827 // Restore the current VMTag from the stack. |
824 __ Pop(R4); | 828 __ Pop(R4); |
825 __ StoreToOffset(kWord, R4, R8, Isolate::vm_tag_offset()); | 829 __ StoreToOffset(kWord, R4, R9, Isolate::vm_tag_offset()); |
826 | 830 |
827 // Restore C++ ABI callee-saved registers. | 831 // Restore C++ ABI callee-saved registers. |
828 if (TargetCPUFeatures::vfp_supported()) { | 832 if (TargetCPUFeatures::vfp_supported()) { |
829 // Restore FPU registers. 2 D registers per Q register. | 833 // Restore FPU registers. 2 D registers per Q register. |
830 __ vldmd(IA_W, SP, firstd, 2 * kAbiPreservedFpuRegCount); | 834 __ vldmd(IA_W, SP, firstd, 2 * kAbiPreservedFpuRegCount); |
831 } else { | 835 } else { |
832 __ AddImmediate(SP, kAbiPreservedFpuRegCount * kFpuRegisterSize); | 836 __ AddImmediate(SP, kAbiPreservedFpuRegCount * kFpuRegisterSize); |
833 } | 837 } |
834 // Restore CPU registers. | 838 // Restore CPU registers. |
835 __ PopList(kAbiPreservedCpuRegs); | 839 __ PopList(kAbiPreservedCpuRegs); |
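
The bookkeeping around the blx into Dart code above amounts to saving the isolate's VM tag, top resource and top exit frame, marking the isolate as executing Dart, and restoring everything afterwards. A rough C++ equivalent, using simplified stand-in types and a placeholder tag value rather than the real Isolate layout:

#include <stdint.h>

typedef uintptr_t uword;
static const uword kDartTagId = 0;  // placeholder value, not the real tag

struct IsolateState {  // illustrative stand-in for the Isolate fields used here
  uword vm_tag;
  uword top_resource;
  uword top_exit_frame_info;
};

void InvokeDart(IsolateState* isolate, void (*dart_entry)()) {
  // Save the current tag, resource and exit frame (the Push(R5)/Push(R4) above).
  uword saved_tag = isolate->vm_tag;
  uword saved_resource = isolate->top_resource;
  uword saved_exit_frame = isolate->top_exit_frame_info;

  isolate->vm_tag = kDartTagId;       // mark the isolate as executing Dart code
  isolate->top_resource = 0;
  isolate->top_exit_frame_info = 0;

  dart_entry();                       // blx R0, with R4 = arguments descriptor

  isolate->top_exit_frame_info = saved_exit_frame;  // popped back off the stack
  isolate->top_resource = saved_resource;
  isolate->vm_tag = saved_tag;
}
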
(...skipping 1081 matching lines...) |
1917 void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) { | 1921 void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) { |
1918 ASSERT(kExceptionObjectReg == R0); | 1922 ASSERT(kExceptionObjectReg == R0); |
1919 ASSERT(kStackTraceObjectReg == R1); | 1923 ASSERT(kStackTraceObjectReg == R1); |
1920 __ mov(IP, Operand(R1)); // Copy Stack pointer into IP. | 1924 __ mov(IP, Operand(R1)); // Copy Stack pointer into IP. |
1921 __ mov(LR, Operand(R0)); // Program counter. | 1925 __ mov(LR, Operand(R0)); // Program counter. |
1922 __ mov(R0, Operand(R3)); // Exception object. | 1926 __ mov(R0, Operand(R3)); // Exception object. |
1923 __ ldr(R1, Address(SP, 0)); // StackTrace object. | 1927 __ ldr(R1, Address(SP, 0)); // StackTrace object. |
1924 __ ldr(R3, Address(SP, 4)); // Isolate. | 1928 __ ldr(R3, Address(SP, 4)); // Isolate. |
1925 __ mov(FP, Operand(R2)); // Frame_pointer. | 1929 __ mov(FP, Operand(R2)); // Frame_pointer. |
1926 __ mov(SP, Operand(IP)); // Set Stack pointer. | 1930 __ mov(SP, Operand(IP)); // Set Stack pointer. |
| 1931 // TODO(koda): Pass thread instead of isolate. |
| 1932 __ LoadFromOffset(kWord, THR, R3, Isolate::mutator_thread_offset()); |
1927 // Set the tag. | 1933 // Set the tag. |
1928 __ LoadImmediate(R2, VMTag::kDartTagId); | 1934 __ LoadImmediate(R2, VMTag::kDartTagId); |
1929 __ StoreToOffset(kWord, R2, R3, Isolate::vm_tag_offset()); | 1935 __ StoreToOffset(kWord, R2, R3, Isolate::vm_tag_offset()); |
1930 // Clear top exit frame. | 1936 // Clear top exit frame. |
1931 __ LoadImmediate(R2, 0); | 1937 __ LoadImmediate(R2, 0); |
1932 __ StoreToOffset(kWord, R2, R3, Isolate::top_exit_frame_info_offset()); | 1938 __ StoreToOffset(kWord, R2, R3, Isolate::top_exit_frame_info_offset()); |
1933 __ bx(LR); // Jump to the exception handler code. | 1939 __ bx(LR); // Jump to the exception handler code. |
1934 } | 1940 } |
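
Conceptually the stub above behaves like a longjmp into the Dart exception handler: it installs the handler's PC, SP and FP, places the exception and stacktrace objects in their conventional registers, and resets per-isolate state before jumping. A simplified sketch of that final state restoration in the new version, with assumed field names rather than the real Isolate layout:

#include <stdint.h>

typedef uintptr_t uword;
static const uword kDartTagId = 0;   // placeholder, not the real tag value

struct IsolateFields {               // illustrative stand-in only
  uword mutator_thread;
  uword vm_tag;
  uword top_exit_frame_info;
};

// State reloaded just before the bx LR into the handler code.
void RestoreStateBeforeHandler(IsolateFields* isolate, uword* thr_register) {
  *thr_register = isolate->mutator_thread;  // LoadFromOffset(kWord, THR, R3, ...)
  isolate->vm_tag = kDartTagId;             // set the tag back to "Dart"
  isolate->top_exit_frame_info = 0;         // clear the top exit frame
}
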
1935 | 1941 |
1936 | 1942 |
(...skipping 193 matching lines...) |
2130 // Result: | 2136 // Result: |
2131 // R1: entry point. | 2137 // R1: entry point. |
2132 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { | 2138 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { |
2133 EmitMegamorphicLookup(assembler, R0, R1, R1); | 2139 EmitMegamorphicLookup(assembler, R0, R1, R1); |
2134 __ Ret(); | 2140 __ Ret(); |
2135 } | 2141 } |
2136 | 2142 |
2137 } // namespace dart | 2143 } // namespace dart |
2138 | 2144 |
2139 #endif // defined TARGET_ARCH_ARM | 2145 #endif // defined TARGET_ARCH_ARM |