OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 704 matching lines...) |
715 // Check for maximum allowed length. | 715 // Check for maximum allowed length. |
716 const intptr_t max_len = | 716 const intptr_t max_len = |
717 reinterpret_cast<intptr_t>(Smi::New(Array::kMaxElements)); | 717 reinterpret_cast<intptr_t>(Smi::New(Array::kMaxElements)); |
718 __ CompareImmediate(R2, max_len); | 718 __ CompareImmediate(R2, max_len); |
719 __ b(&slow_case, GT); | 719 __ b(&slow_case, GT); |
720 | 720 |
721 const intptr_t cid = kArrayCid; | 721 const intptr_t cid = kArrayCid; |
722 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, R4, &slow_case)); | 722 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, R4, &slow_case)); |
723 | 723 |
724 Heap::Space space = Heap::kNew; | 724 Heap::Space space = Heap::kNew; |
725 __ ldr(R8, Address(THR, Thread::heap_offset())); | |
726 | 725 |
727 // Calculate and align allocation size. | 726 // Calculate and align allocation size. |
728 // Load new object start and calculate next object start. | 727 // Load new object start and calculate next object start. |
729 // R1: array element type. | 728 // R1: array element type. |
730 // R2: array length as Smi. | 729 // R2: array length as Smi. |
731 // R8: heap. | 730 // R8: heap. |
732 __ LoadFromOffset(R0, R8, Heap::TopOffset(space)); | 731 __ ldr(R0, Address(THR, Thread::top_offset())); |
733 intptr_t fixed_size_plus_alignment_padding = | 732 intptr_t fixed_size_plus_alignment_padding = |
734 sizeof(RawArray) + kObjectAlignment - 1; | 733 sizeof(RawArray) + kObjectAlignment - 1; |
735 __ LoadImmediate(R3, fixed_size_plus_alignment_padding); | 734 __ LoadImmediate(R3, fixed_size_plus_alignment_padding); |
736 __ add(R3, R3, Operand(R2, LSL, 2)); // R2 is Smi. | 735 __ add(R3, R3, Operand(R2, LSL, 2)); // R2 is Smi. |
737 ASSERT(kSmiTagShift == 1); | 736 ASSERT(kSmiTagShift == 1); |
738 __ andi(R3, R3, Immediate(~(kObjectAlignment - 1))); | 737 __ andi(R3, R3, Immediate(~(kObjectAlignment - 1))); |
739 // R0: potential new object start. | 738 // R0: potential new object start. |
740 // R3: object size in bytes. | 739 // R3: object size in bytes. |
741 __ adds(R7, R3, Operand(R0)); | 740 __ adds(R7, R3, Operand(R0)); |
742 __ b(&slow_case, CS); // Branch if unsigned overflow. | 741 __ b(&slow_case, CS); // Branch if unsigned overflow. |
743 | 742 |
744 // Check if the allocation fits into the remaining space. | 743 // Check if the allocation fits into the remaining space. |
745 // R0: potential new object start. | 744 // R0: potential new object start. |
746 // R1: array element type. | 745 // R1: array element type. |
747 // R2: array length as Smi. | 746 // R2: array length as Smi. |
748 // R3: array size. | 747 // R3: array size. |
749 // R7: potential next object start. | 748 // R7: potential next object start. |
750 // R8: heap. | 749 // R8: heap. |
751 __ LoadFromOffset(TMP, R8, Heap::EndOffset(space)); | 750 __ LoadFromOffset(TMP, THR, Thread::end_offset()); |
752 __ CompareRegisters(R7, TMP); | 751 __ CompareRegisters(R7, TMP); |
753 __ b(&slow_case, CS); // Branch if unsigned higher or equal. | 752 __ b(&slow_case, CS); // Branch if unsigned higher or equal. |
754 | 753 |
755 // Successfully allocated the object(s), now update top to point to | 754 // Successfully allocated the object(s), now update top to point to |
756 // next object start and initialize the object. | 755 // next object start and initialize the object. |
757 // R0: potential new object start. | 756 // R0: potential new object start. |
758 // R3: array size. | 757 // R3: array size. |
759 // R7: potential next object start. | 758 // R7: potential next object start. |
760 // R8: heap. | 759 // R8: heap. |
761 __ StoreToOffset(R7, R8, Heap::TopOffset(space)); | 760 __ str(R7, Address(THR, Thread::top_offset())); |
762 __ add(R0, R0, Operand(kHeapObjectTag)); | 761 __ add(R0, R0, Operand(kHeapObjectTag)); |
763 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R3, space)); | 762 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R3, space)); |
764 | 763 |
765 // R0: new object start as a tagged pointer. | 764 // R0: new object start as a tagged pointer. |
766 // R1: array element type. | 765 // R1: array element type. |
767 // R2: array length as Smi. | 766 // R2: array length as Smi. |
768 // R3: array size. | 767 // R3: array size. |
769 // R7: new object end address. | 768 // R7: new object end address. |
770 | 769 |
771 // Store the type argument field. | 770 // Store the type argument field. |
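The hunk above drops the intermediate Heap load and reads the new-space allocation top and end straight off the current thread. A rough C++ sketch of that bump-pointer fast path follows; the struct and field names are illustrative stand-ins for whatever Thread::top_offset() and Thread::end_offset() actually address, not the VM's real declarations.

```cpp
// Sketch of the inline-allocation fast path the stub emits, assuming a
// per-thread top/end pair. Names are illustrative, not the VM's API.
#include <stdint.h>

struct ThreadSketch {
  uintptr_t top;  // next free address in new space (what Thread::top_offset() locates)
  uintptr_t end;  // end of the usable new-space region (Thread::end_offset())
};

// Returns the untagged start of the new object, or 0 to request the slow case.
inline uintptr_t TryAllocateRaw(ThreadSketch* thread, uintptr_t size_in_bytes) {
  uintptr_t start = thread->top;            // ldr R0, [THR, #top_offset]
  uintptr_t next = start + size_in_bytes;   // adds R7, R3, R0
  if (next < start) return 0;               // b &slow_case, CS  (unsigned overflow)
  if (next >= thread->end) return 0;        // cmp R7, end; b &slow_case, CS
  thread->top = next;                       // str R7, [THR, #top_offset]
  return start;                             // caller adds kHeapObjectTag and sets the header
}
```

As in the rows above, the caller still tags the result, updates the allocation stats, and fills in the header and type-argument fields; only the source of top/end changes.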
(...skipping 212 matching lines...) |
984 __ add(R2, R2, Operand(R1, LSL, 3)); | 983 __ add(R2, R2, Operand(R1, LSL, 3)); |
985 ASSERT(kSmiTagShift == 1); | 984 ASSERT(kSmiTagShift == 1); |
986 __ andi(R2, R2, Immediate(~(kObjectAlignment - 1))); | 985 __ andi(R2, R2, Immediate(~(kObjectAlignment - 1))); |
987 | 986 |
988 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R4, &slow_case)); | 987 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R4, &slow_case)); |
989 // Now allocate the object. | 988 // Now allocate the object. |
990 // R1: number of context variables. | 989 // R1: number of context variables. |
991 // R2: object size. | 990 // R2: object size. |
992 const intptr_t cid = kContextCid; | 991 const intptr_t cid = kContextCid; |
993 Heap::Space space = Heap::kNew; | 992 Heap::Space space = Heap::kNew; |
994 __ ldr(R5, Address(THR, Thread::heap_offset())); | 993 __ ldr(R0, Address(THR, Thread::top_offset())); |
995 __ ldr(R0, Address(R5, Heap::TopOffset(space))); | |
996 __ add(R3, R2, Operand(R0)); | 994 __ add(R3, R2, Operand(R0)); |
997 // Check if the allocation fits into the remaining space. | 995 // Check if the allocation fits into the remaining space. |
998 // R0: potential new object. | 996 // R0: potential new object. |
999 // R1: number of context variables. | 997 // R1: number of context variables. |
1000 // R2: object size. | 998 // R2: object size. |
1001 // R3: potential next object start. | 999 // R3: potential next object start. |
1002 // R5: heap. | 1000 // R5: heap. |
1003 __ ldr(TMP, Address(R5, Heap::EndOffset(space))); | 1001 __ ldr(TMP, Address(THR, Thread::end_offset())); |
1004 __ CompareRegisters(R3, TMP); | 1002 __ CompareRegisters(R3, TMP); |
1005 if (FLAG_use_slow_path) { | 1003 if (FLAG_use_slow_path) { |
1006 __ b(&slow_case); | 1004 __ b(&slow_case); |
1007 } else { | 1005 } else { |
1008 __ b(&slow_case, CS); // Branch if unsigned higher or equal. | 1006 __ b(&slow_case, CS); // Branch if unsigned higher or equal. |
1009 } | 1007 } |
1010 | 1008 |
1011 // Successfully allocated the object, now update top to point to | 1009 // Successfully allocated the object, now update top to point to |
1012 // next object start and initialize the object. | 1010 // next object start and initialize the object. |
1013 // R0: new object. | 1011 // R0: new object. |
1014 // R1: number of context variables. | 1012 // R1: number of context variables. |
1015 // R2: object size. | 1013 // R2: object size. |
1016 // R3: next object start. | 1014 // R3: next object start. |
1017 // R5: heap. | 1015 // R5: heap. |
1018 __ str(R3, Address(R5, Heap::TopOffset(space))); | 1016 __ str(R3, Address(THR, Thread::top_offset())); |
1019 __ add(R0, R0, Operand(kHeapObjectTag)); | 1017 __ add(R0, R0, Operand(kHeapObjectTag)); |
1020 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space)); | 1018 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space)); |
1021 | 1019 |
1022 // Calculate the size tag. | 1020 // Calculate the size tag. |
1023 // R0: new object. | 1021 // R0: new object. |
1024 // R1: number of context variables. | 1022 // R1: number of context variables. |
1025 // R2: object size. | 1023 // R2: object size. |
1026 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; | 1024 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; |
1027 __ CompareImmediate(R2, RawObject::SizeTag::kMaxSizeTag); | 1025 __ CompareImmediate(R2, RawObject::SizeTag::kMaxSizeTag); |
1028 // If no size tag overflow, shift R2 left, else set R2 to zero. | 1026 // If no size tag overflow, shift R2 left, else set R2 to zero. |
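A context object's size depends on the number of captured variables, so its header size tag has to be computed at run time rather than baked into the stub. A sketch of that computation, using assumed constants (the real kSizeTagPos and kMaxSizeTag values live in RawObject and may differ):

```cpp
// Sketch of the size-tag computation: if the aligned object size fits in the
// header's size field, shift it into place; otherwise encode 0 and let the
// runtime derive the size from the object itself. Constants are assumptions.
#include <stdint.h>

constexpr uintptr_t kObjectAlignmentLog2Sketch = 4;  // 16-byte alignment on ARM64 (assumed)
constexpr uintptr_t kSizeTagPosSketch = 8;           // hypothetical bit position in the tags word
constexpr uintptr_t kMaxSizeTagSketch = (0xFFu << kObjectAlignmentLog2Sketch);

inline uintptr_t SizeTagBits(uintptr_t object_size_in_bytes) {
  const uintptr_t shift = kSizeTagPosSketch - kObjectAlignmentLog2Sketch;
  return (object_size_in_bytes <= kMaxSizeTagSketch)
             ? (object_size_in_bytes << shift)  // "shift R2 left"
             : 0;                               // "else set R2 to zero"
}
```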
(...skipping 139 matching lines...) |
1168 // R1: instantiated type arguments. | 1166 // R1: instantiated type arguments. |
1169 } | 1167 } |
1170 Isolate* isolate = Isolate::Current(); | 1168 Isolate* isolate = Isolate::Current(); |
1171 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) && | 1169 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) && |
1172 !cls.TraceAllocation(isolate)) { | 1170 !cls.TraceAllocation(isolate)) { |
1173 Label slow_case; | 1171 Label slow_case; |
1174 // Allocate the object and update top to point to | 1172 // Allocate the object and update top to point to |
1175 // next object start and initialize the allocated object. | 1173 // next object start and initialize the allocated object. |
1176 // R1: instantiated type arguments (if is_cls_parameterized). | 1174 // R1: instantiated type arguments (if is_cls_parameterized). |
1177 Heap::Space space = Heap::kNew; | 1175 Heap::Space space = Heap::kNew; |
1178 __ ldr(R5, Address(THR, Thread::heap_offset())); | 1176 __ ldr(R2, Address(THR, Thread::top_offset())); |
1179 __ ldr(R2, Address(R5, Heap::TopOffset(space))); | |
1180 __ AddImmediate(R3, R2, instance_size); | 1177 __ AddImmediate(R3, R2, instance_size); |
1181 // Check if the allocation fits into the remaining space. | 1178 // Check if the allocation fits into the remaining space. |
1182 // R2: potential new object start. | 1179 // R2: potential new object start. |
1183 // R3: potential next object start. | 1180 // R3: potential next object start. |
1184 // R5: heap. | 1181 // R5: heap. |
1185 __ ldr(TMP, Address(R5, Heap::EndOffset(space))); | 1182 __ ldr(TMP, Address(THR, Thread::end_offset())); |
1186 __ CompareRegisters(R3, TMP); | 1183 __ CompareRegisters(R3, TMP); |
1187 if (FLAG_use_slow_path) { | 1184 if (FLAG_use_slow_path) { |
1188 __ b(&slow_case); | 1185 __ b(&slow_case); |
1189 } else { | 1186 } else { |
1190 __ b(&slow_case, CS); // Unsigned higher or equal. | 1187 __ b(&slow_case, CS); // Unsigned higher or equal. |
1191 } | 1188 } |
1192 __ str(R3, Address(R5, Heap::TopOffset(space))); | 1189 __ str(R3, Address(THR, Thread::top_offset())); |
1193 NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space)); | 1190 NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space)); |
1194 | 1191 |
1195 // R2: new object start. | 1192 // R2: new object start. |
1196 // R3: next object start. | 1193 // R3: next object start. |
1197 // R1: new object type arguments (if is_cls_parameterized). | 1194 // R1: new object type arguments (if is_cls_parameterized). |
1198 // Set the tags. | 1195 // Set the tags. |
1199 uword tags = 0; | 1196 uword tags = 0; |
1200 tags = RawObject::SizeTag::update(instance_size, tags); | 1197 tags = RawObject::SizeTag::update(instance_size, tags); |
1201 ASSERT(cls.id() != kIllegalCid); | 1198 ASSERT(cls.id() != kIllegalCid); |
1202 tags = RawObject::ClassIdTag::update(cls.id(), tags); | 1199 tags = RawObject::ClassIdTag::update(cls.id(), tags); |
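For a fixed-size instance the stub generator can decide at generation time whether an inline fast path is worth emitting at all, and can precompute the whole tags word. The sketch below restates that decision and the tag assembly; the predicate and shift names are placeholders for the FLAG_inline_alloc, Heap::IsAllocatableInNewSpace, cls.TraceAllocation, and bitfield checks visible above.

```cpp
// Sketch of the generate-time gate and header precomputation for a
// fixed-size class. All names are placeholders, not the VM's declarations.
#include <stdint.h>

struct ClassSketch {
  intptr_t id;             // class id (must not be kIllegalCid)
  intptr_t instance_size;  // fixed and known when the stub is generated
  bool traced;             // allocation tracing enabled for this class
};

inline bool EmitInlineFastPath(const ClassSketch& cls,
                               bool inline_alloc_enabled,
                               intptr_t max_new_space_size) {
  // Only small, untraced allocations get the bump-pointer fast path;
  // everything else falls straight through to the runtime slow case.
  return inline_alloc_enabled &&
         cls.instance_size <= max_new_space_size &&
         !cls.traced;
}

inline uintptr_t PrecomputedTags(const ClassSketch& cls,
                                 uintptr_t size_tag_bits,
                                 uintptr_t class_id_shift) {
  // Mirrors tags = SizeTag::update(instance_size, tags);
  //         tags = ClassIdTag::update(cls.id(), tags);
  return size_tag_bits | (static_cast<uintptr_t>(cls.id) << class_id_shift);
}
```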
(...skipping 1174 matching lines...) |
2377 } | 2374 } |
2378 | 2375 |
2379 | 2376 |
2380 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { | 2377 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { |
2381 __ brk(0); | 2378 __ brk(0); |
2382 } | 2379 } |
2383 | 2380 |
2384 } // namespace dart | 2381 } // namespace dart |
2385 | 2382 |
2386 #endif // defined TARGET_ARCH_ARM64 | 2383 #endif // defined TARGET_ARCH_ARM64 |