OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 614 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
625 // Called for inline allocation of arrays. | 625 // Called for inline allocation of arrays. |
626 // Input parameters: | 626 // Input parameters: |
627 // LR: return address. | 627 // LR: return address. |
628 // R1: array element type (either NULL or an instantiated type). | 628 // R1: array element type (either NULL or an instantiated type). |
629 // R2: array length as Smi (must be preserved). | 629 // R2: array length as Smi (must be preserved). |
630 // The newly allocated object is returned in R0. | 630 // The newly allocated object is returned in R0. |
631 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, | 631 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, |
632 uword* entry_patch_offset, uword* patch_code_pc_offset) { | 632 uword* entry_patch_offset, uword* patch_code_pc_offset) { |
633 *entry_patch_offset = assembler->CodeSize(); | 633 *entry_patch_offset = assembler->CodeSize(); |
634 Label slow_case; | 634 Label slow_case; |
635 | 635 Isolate* isolate = Isolate::Current(); |
636 // Compute the size to be allocated, it is based on the array length | 636 // Compute the size to be allocated, it is based on the array length |
637 // and is computed as: | 637 // and is computed as: |
638 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). | 638 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). |
639 __ MoveRegister(R3, R2); // Array length. | 639 __ MoveRegister(R3, R2); // Array length. |
640 | 640 const Class& cls = Class::Handle(isolate->object_store()->array_class()); |
| 641 ASSERT(!cls.IsNull()); |
641 // Check that length is a positive Smi. | 642 // Check that length is a positive Smi. |
642 __ tst(R3, Operand(kSmiTagMask)); | 643 __ tst(R3, Operand(kSmiTagMask)); |
643 __ b(&slow_case, NE); | 644 if (FLAG_use_slow_path || cls.trace_allocation()) { |
| 645 __ b(&slow_case); |
| 646 } else { |
| 647 __ b(&slow_case, NE); |
| 648 } |
644 __ cmp(R3, Operand(0)); | 649 __ cmp(R3, Operand(0)); |
645 __ b(&slow_case, LT); | 650 __ b(&slow_case, LT); |
646 | 651 |
647 // Check for maximum allowed length. | 652 // Check for maximum allowed length. |
648 const intptr_t max_len = | 653 const intptr_t max_len = |
649 reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements)); | 654 reinterpret_cast<int32_t>(Smi::New(Array::kMaxElements)); |
650 __ CompareImmediate(R3, max_len); | 655 __ CompareImmediate(R3, max_len); |
651 __ b(&slow_case, GT); | 656 __ b(&slow_case, GT); |
652 | 657 |
653 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 658 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; |
654 __ LoadImmediate(R9, fixed_size); | 659 __ LoadImmediate(R9, fixed_size); |
655 __ add(R9, R9, Operand(R3, LSL, 1)); // R3 is a Smi. | 660 __ add(R9, R9, Operand(R3, LSL, 1)); // R3 is a Smi. |
656 ASSERT(kSmiTagShift == 1); | 661 ASSERT(kSmiTagShift == 1); |
657 __ bic(R9, R9, Operand(kObjectAlignment - 1)); | 662 __ bic(R9, R9, Operand(kObjectAlignment - 1)); |
658 | 663 |
659 // R9: Allocation size. | 664 // R9: Allocation size. |
660 | 665 |
661 Isolate* isolate = Isolate::Current(); | |
662 Heap* heap = isolate->heap(); | 666 Heap* heap = isolate->heap(); |
663 const intptr_t cid = kArrayCid; | 667 const intptr_t cid = kArrayCid; |
664 Heap::Space space = heap->SpaceForAllocation(cid); | 668 Heap::Space space = heap->SpaceForAllocation(cid); |
665 __ LoadImmediate(R6, heap->TopAddress(space)); | 669 __ LoadImmediate(R6, heap->TopAddress(space)); |
666 __ ldr(R0, Address(R6, 0)); // Potential new object start. | 670 __ ldr(R0, Address(R6, 0)); // Potential new object start. |
667 __ adds(R7, R0, Operand(R9)); // Potential next object start. | 671 __ adds(R7, R0, Operand(R9)); // Potential next object start. |
668 __ b(&slow_case, CS); // Branch if unsigned overflow. | 672 __ b(&slow_case, CS); // Branch if unsigned overflow. |
669 | 673 |
670 // Check if the allocation fits into the remaining space. | 674 // Check if the allocation fits into the remaining space. |
671 // R0: potential new object start. | 675 // R0: potential new object start. |
(...skipping 1451 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// (Input-register description is in the comment block elided just above;
// only the tail is visible here.)
// Result:
//   R1: entry point.
void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) {
  // Probe the megamorphic cache. R1 is used both as an input and as the
  // result register: on return it holds the looked-up entry point.
  // NOTE(review): exact input-register contract (R0/R1) is defined by
  // EmitMegamorphicLookup, which is not visible in this chunk — confirm there.
  EmitMegamorphicLookup(assembler, R0, R1, R1);
  // Return to the caller with the entry point in R1.
  __ Ret();
}
2129 | 2133 |
2130 } // namespace dart | 2134 } // namespace dart |
2131 | 2135 |
2132 #endif // defined TARGET_ARCH_ARM | 2136 #endif // defined TARGET_ARCH_ARM |
OLD | NEW |