OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 645 matching lines...)
656 __ MaybeTraceAllocation(kArrayCid, &slow_case, Assembler::kFarJump)); | 656 __ MaybeTraceAllocation(kArrayCid, &slow_case, Assembler::kFarJump)); |
657 | 657 |
658 const intptr_t fixed_size_plus_alignment_padding = | 658 const intptr_t fixed_size_plus_alignment_padding = |
659 sizeof(RawArray) + kObjectAlignment - 1; | 659 sizeof(RawArray) + kObjectAlignment - 1; |
660 // RDI is a Smi. | 660 // RDI is a Smi. |
661 __ leaq(RDI, Address(RDI, TIMES_4, fixed_size_plus_alignment_padding)); | 661 __ leaq(RDI, Address(RDI, TIMES_4, fixed_size_plus_alignment_padding)); |
662 ASSERT(kSmiTagShift == 1); | 662 ASSERT(kSmiTagShift == 1); |
663 __ andq(RDI, Immediate(-kObjectAlignment)); | 663 __ andq(RDI, Immediate(-kObjectAlignment)); |
664 | 664 |
665 const intptr_t cid = kArrayCid; | 665 const intptr_t cid = kArrayCid; |
666 Heap::Space space = Heap::kNew; | 666 NOT_IN_PRODUCT(Heap::Space space = Heap::kNew); |
667 __ movq(R13, Address(THR, Thread::heap_offset())); | 667 __ movq(RAX, Address(THR, Thread::top_offset())); |
668 __ movq(RAX, Address(R13, Heap::TopOffset(space))); | |
669 | 668 |
670 // RDI: allocation size. | 669 // RDI: allocation size. |
671 __ movq(RCX, RAX); | 670 __ movq(RCX, RAX); |
672 __ addq(RCX, RDI); | 671 __ addq(RCX, RDI); |
673 __ j(CARRY, &slow_case); | 672 __ j(CARRY, &slow_case); |
674 | 673 |
675 // Check if the allocation fits into the remaining space. | 674 // Check if the allocation fits into the remaining space. |
676 // RAX: potential new object start. | 675 // RAX: potential new object start. |
677 // RCX: potential next object start. | 676 // RCX: potential next object start. |
678 // RDI: allocation size. | 677 // RDI: allocation size. |
679 // R13: heap. | 678 __ cmpq(RCX, Address(THR, Thread::end_offset())); |
680 __ cmpq(RCX, Address(R13, Heap::EndOffset(space))); | |
681 __ j(ABOVE_EQUAL, &slow_case); | 679 __ j(ABOVE_EQUAL, &slow_case); |
682 | 680 |
683 // Successfully allocated the object(s), now update top to point to | 681 // Successfully allocated the object(s), now update top to point to |
684 // next object start and initialize the object. | 682 // next object start and initialize the object. |
685 __ movq(Address(R13, Heap::TopOffset(space)), RCX); | 683 __ movq(Address(THR, Thread::top_offset()), RCX); |
686 __ addq(RAX, Immediate(kHeapObjectTag)); | 684 __ addq(RAX, Immediate(kHeapObjectTag)); |
687 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space)); | 685 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space)); |
688 // Initialize the tags. | 686 // Initialize the tags. |
689 // RAX: new object start as a tagged pointer. | 687 // RAX: new object start as a tagged pointer. |
690 // RDI: allocation size. | 688 // RDI: allocation size. |
691 { | 689 { |
692 Label size_tag_overflow, done; | 690 Label size_tag_overflow, done; |
693 __ cmpq(RDI, Immediate(RawObject::SizeTag::kMaxSizeTag)); | 691 __ cmpq(RDI, Immediate(RawObject::SizeTag::kMaxSizeTag)); |
694 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump); | 692 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump); |
695 __ shlq(RDI, Immediate(RawObject::kSizeTagPos - kObjectAlignmentLog2)); | 693 __ shlq(RDI, Immediate(RawObject::kSizeTagPos - kObjectAlignmentLog2)); |
(...skipping 209 matching lines...)
905 __ leaq(R13, Address(R10, TIMES_8, fixed_size_plus_alignment_padding)); | 903 __ leaq(R13, Address(R10, TIMES_8, fixed_size_plus_alignment_padding)); |
906 __ andq(R13, Immediate(-kObjectAlignment)); | 904 __ andq(R13, Immediate(-kObjectAlignment)); |
907 | 905 |
908 // Check for allocation tracing. | 906 // Check for allocation tracing. |
909 NOT_IN_PRODUCT( | 907 NOT_IN_PRODUCT( |
910 __ MaybeTraceAllocation(kContextCid, &slow_case, Assembler::kFarJump)); | 908 __ MaybeTraceAllocation(kContextCid, &slow_case, Assembler::kFarJump)); |
911 | 909 |
912 // Now allocate the object. | 910 // Now allocate the object. |
913 // R10: number of context variables. | 911 // R10: number of context variables. |
914 const intptr_t cid = kContextCid; | 912 const intptr_t cid = kContextCid; |
915 Heap::Space space = Heap::kNew; | 913 NOT_IN_PRODUCT(Heap::Space space = Heap::kNew); |
916 __ movq(RCX, Address(THR, Thread::heap_offset())); | 914 __ movq(RAX, Address(THR, Thread::top_offset())); |
917 __ movq(RAX, Address(RCX, Heap::TopOffset(space))); | |
918 __ addq(R13, RAX); | 915 __ addq(R13, RAX); |
919 // Check if the allocation fits into the remaining space. | 916 // Check if the allocation fits into the remaining space. |
920 // RAX: potential new object. | 917 // RAX: potential new object. |
921 // R13: potential next object start. | 918 // R13: potential next object start. |
922 // R10: number of context variables. | 919 // R10: number of context variables. |
923 // RCX: heap. | 920 __ cmpq(R13, Address(THR, Thread::end_offset())); |
924 __ cmpq(R13, Address(RCX, Heap::EndOffset(space))); | |
925 if (FLAG_use_slow_path) { | 921 if (FLAG_use_slow_path) { |
926 __ jmp(&slow_case); | 922 __ jmp(&slow_case); |
927 } else { | 923 } else { |
928 __ j(ABOVE_EQUAL, &slow_case); | 924 __ j(ABOVE_EQUAL, &slow_case); |
929 } | 925 } |
930 | 926 |
931 // Successfully allocated the object, now update top to point to | 927 // Successfully allocated the object, now update top to point to |
932 // next object start and initialize the object. | 928 // next object start and initialize the object. |
933 // RAX: new object. | 929 // RAX: new object. |
934 // R13: next object start. | 930 // R13: next object start. |
935 // R10: number of context variables. | 931 // R10: number of context variables. |
936 // RCX: heap. | 932 __ movq(Address(THR, Thread::top_offset()), R13); |
937 __ movq(Address(RCX, Heap::TopOffset(space)), R13); | |
938 // R13: Size of allocation in bytes. | 933 // R13: Size of allocation in bytes. |
939 __ subq(R13, RAX); | 934 __ subq(R13, RAX); |
940 __ addq(RAX, Immediate(kHeapObjectTag)); | 935 __ addq(RAX, Immediate(kHeapObjectTag)); |
941 // Generate isolate-independent code to allow sharing between isolates. | 936 // Generate isolate-independent code to allow sharing between isolates. |
942 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R13, space)); | 937 NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R13, space)); |
943 | 938 |
944 // Calculate the size tag. | 939 // Calculate the size tag. |
945 // RAX: new object. | 940 // RAX: new object. |
946 // R10: number of context variables. | 941 // R10: number of context variables. |
947 { | 942 { |
(...skipping 158 matching lines...)
1106 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset)); | 1101 __ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset)); |
1107 // RDX: instantiated type arguments. | 1102 // RDX: instantiated type arguments. |
1108 } | 1103 } |
1109 Isolate* isolate = Isolate::Current(); | 1104 Isolate* isolate = Isolate::Current(); |
1110 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) && | 1105 if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) && |
1111 !cls.TraceAllocation(isolate)) { | 1106 !cls.TraceAllocation(isolate)) { |
1112 Label slow_case; | 1107 Label slow_case; |
1113 // Allocate the object and update top to point to | 1108 // Allocate the object and update top to point to |
1114 // next object start and initialize the allocated object. | 1109 // next object start and initialize the allocated object. |
1115 // RDX: instantiated type arguments (if is_cls_parameterized). | 1110 // RDX: instantiated type arguments (if is_cls_parameterized). |
1116 Heap::Space space = Heap::kNew; | 1111 NOT_IN_PRODUCT(Heap::Space space = Heap::kNew); |
1117 __ movq(RCX, Address(THR, Thread::heap_offset())); | 1112 __ movq(RAX, Address(THR, Thread::top_offset())); |
1118 __ movq(RAX, Address(RCX, Heap::TopOffset(space))); | |
1119 __ leaq(RBX, Address(RAX, instance_size)); | 1113 __ leaq(RBX, Address(RAX, instance_size)); |
1120 // Check if the allocation fits into the remaining space. | 1114 // Check if the allocation fits into the remaining space. |
1121 // RAX: potential new object start. | 1115 // RAX: potential new object start. |
1122 // RBX: potential next object start. | 1116 // RBX: potential next object start. |
1123 // RCX: heap. | 1117 __ cmpq(RBX, Address(THR, Thread::end_offset())); |
1124 __ cmpq(RBX, Address(RCX, Heap::EndOffset(space))); | |
1125 if (FLAG_use_slow_path) { | 1118 if (FLAG_use_slow_path) { |
1126 __ jmp(&slow_case); | 1119 __ jmp(&slow_case); |
1127 } else { | 1120 } else { |
1128 __ j(ABOVE_EQUAL, &slow_case); | 1121 __ j(ABOVE_EQUAL, &slow_case); |
1129 } | 1122 } |
1130 __ movq(Address(RCX, Heap::TopOffset(space)), RBX); | 1123 __ movq(Address(THR, Thread::top_offset()), RBX); |
1131 NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space)); | 1124 NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space)); |
1132 | 1125 |
1133 // RAX: new object start (untagged). | 1126 // RAX: new object start (untagged). |
1134 // RBX: next object start. | 1127 // RBX: next object start. |
1135 // RDX: new object type arguments (if is_cls_parameterized). | 1128 // RDX: new object type arguments (if is_cls_parameterized). |
1136 // Set the tags. | 1129 // Set the tags. |
1137 uint32_t tags = 0; | 1130 uint32_t tags = 0; |
1138 tags = RawObject::SizeTag::update(instance_size, tags); | 1131 tags = RawObject::SizeTag::update(instance_size, tags); |
1139 ASSERT(cls.id() != kIllegalCid); | 1132 ASSERT(cls.id() != kIllegalCid); |
1140 tags = RawObject::ClassIdTag::update(cls.id(), tags); | 1133 tags = RawObject::ClassIdTag::update(cls.id(), tags); |
(...skipping 1182 matching lines...)
2323 } | 2316 } |
2324 | 2317 |
2325 | 2318 |
2326 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { | 2319 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { |
2327 __ int3(); | 2320 __ int3(); |
2328 } | 2321 } |
2329 | 2322 |
2330 } // namespace dart | 2323 } // namespace dart |
2331 | 2324 |
2332 #endif // defined TARGET_ARCH_X64 | 2325 #endif // defined TARGET_ARCH_X64 |
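
The NEW side of this diff replaces the two dependent loads (`movq(reg, Address(THR, Thread::heap_offset()))` followed by `movq(RAX, Address(reg, Heap::TopOffset(space)))`) with a single THR-relative load of `Thread::top_offset()`, and compares the candidate end against `Thread::end_offset()` the same way; the scratch register that used to hold the heap pointer (R13 or RCX) is freed, and `space` is only kept for the `NOT_IN_PRODUCT` allocation-stats update. Below is a minimal conceptual sketch of the bump-pointer fast path these stubs emit, written in plain C++ rather than assembler. It is illustrative only: the struct and field names (`ThreadSketch`, `top_`, `end_`) are assumptions for the example, not the real Thread layout, whose offsets come from `Thread::top_offset()` and `Thread::end_offset()` as shown in the diff above.

    // Conceptual sketch (not VM code) of the allocation fast path.
    #include <stdint.h>
    #include <stddef.h>

    struct ThreadSketch {
      uintptr_t top_;  // next free address in the thread's new-space region
      uintptr_t end_;  // first address past the usable region
    };

    // Returns the untagged start of the new object, or 0 to signal the
    // slow path, mirroring the j(CARRY)/j(ABOVE_EQUAL, &slow_case) checks.
    inline uintptr_t TryAllocate(ThreadSketch* thr, size_t size_in_bytes) {
      uintptr_t start = thr->top_;
      uintptr_t next = start + size_in_bytes;
      if (next < start || next >= thr->end_) {  // overflow or out of space
        return 0;                               // take the runtime slow path
      }
      thr->top_ = next;  // bump top, as movq(Address(THR, top_offset()), ...) does
      return start;      // caller adds kHeapObjectTag and writes the tags word
    }

Reading top and end directly off THR keeps the fast path at one load per pointer and avoids tying up a scratch register for the heap pointer across the size computation and the capacity check.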