| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 593 matching lines...) | (...skipping 593 matching lines...) |
| 604 // RBX : array element type (either NULL or an instantiated type). | 604 // RBX : array element type (either NULL or an instantiated type). |
| 605 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. | 605 // NOTE: R10 cannot be clobbered here as the caller relies on it being saved. |
| 606 // The newly allocated object is returned in RAX. | 606 // The newly allocated object is returned in RAX. |
| 607 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, | 607 void StubCode::GeneratePatchableAllocateArrayStub(Assembler* assembler, |
| 608 uword* entry_patch_offset, uword* patch_code_pc_offset) { | 608 uword* entry_patch_offset, uword* patch_code_pc_offset) { |
| 609 // Must load pool pointer before being able to patch. | 609 // Must load pool pointer before being able to patch. |
| 610 Register new_pp = R13; | 610 Register new_pp = R13; |
| 611 __ LoadPoolPointer(new_pp); | 611 __ LoadPoolPointer(new_pp); |
| 612 *entry_patch_offset = assembler->CodeSize(); | 612 *entry_patch_offset = assembler->CodeSize(); |
| 613 Label slow_case; | 613 Label slow_case; |
| 614 Isolate* isolate = Isolate::Current(); |
| 615 const Class& cls = Class::Handle(isolate->object_store()->array_class()); |
| 616 ASSERT(!cls.IsNull()); |
| 614 // Compute the size to be allocated, it is based on the array length | 617 // Compute the size to be allocated, it is based on the array length |
| 615 // and is computed as: | 618 // and is computed as: |
| 616 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). | 619 // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)). |
| 617 __ movq(RDI, R10); // Array Length. | 620 __ movq(RDI, R10); // Array Length. |
| 618 // Check that length is a positive Smi. | 621 // Check that length is a positive Smi. |
| 619 __ testq(RDI, Immediate(kSmiTagMask)); | 622 __ testq(RDI, Immediate(kSmiTagMask)); |
| 620 __ j(NOT_ZERO, &slow_case); | 623 if (FLAG_use_slow_path || cls.trace_allocation()) { |
| 624 __ jmp(&slow_case); |
| 625 } else { |
| 626 __ j(NOT_ZERO, &slow_case); |
| 627 } |
| 621 __ cmpq(RDI, Immediate(0)); | 628 __ cmpq(RDI, Immediate(0)); |
| 622 __ j(LESS, &slow_case); | 629 __ j(LESS, &slow_case); |
| 623 // Check for maximum allowed length. | 630 // Check for maximum allowed length. |
| 624 const Immediate& max_len = | 631 const Immediate& max_len = |
| 625 Immediate(reinterpret_cast<int64_t>(Smi::New(Array::kMaxElements))); | 632 Immediate(reinterpret_cast<int64_t>(Smi::New(Array::kMaxElements))); |
| 626 __ cmpq(RDI, max_len); | 633 __ cmpq(RDI, max_len); |
| 627 __ j(GREATER, &slow_case); | 634 __ j(GREATER, &slow_case); |
| 628 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; | 635 const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1; |
| 629 __ leaq(RDI, Address(RDI, TIMES_4, fixed_size)); // RDI is a Smi. | 636 __ leaq(RDI, Address(RDI, TIMES_4, fixed_size)); // RDI is a Smi. |
| 630 ASSERT(kSmiTagShift == 1); | 637 ASSERT(kSmiTagShift == 1); |
| 631 __ andq(RDI, Immediate(-kObjectAlignment)); | 638 __ andq(RDI, Immediate(-kObjectAlignment)); |
| 632 | 639 |
| 633 Isolate* isolate = Isolate::Current(); | |
| 634 Heap* heap = isolate->heap(); | 640 Heap* heap = isolate->heap(); |
| 635 const intptr_t cid = kArrayCid; | 641 const intptr_t cid = kArrayCid; |
| 636 Heap::Space space = heap->SpaceForAllocation(cid); | 642 Heap::Space space = heap->SpaceForAllocation(cid); |
| 637 __ movq(RAX, Immediate(heap->TopAddress(space))); | 643 __ movq(RAX, Immediate(heap->TopAddress(space))); |
| 638 __ movq(RAX, Address(RAX, 0)); | 644 __ movq(RAX, Address(RAX, 0)); |
| 639 | 645 |
| 640 // RDI: allocation size. | 646 // RDI: allocation size. |
| 641 __ movq(RCX, RAX); | 647 __ movq(RCX, RAX); |
| 642 __ addq(RCX, RDI); | 648 __ addq(RCX, RDI); |
| 643 __ j(CARRY, &slow_case); | 649 __ j(CARRY, &slow_case); |
| (...skipping 1533 matching lines...) | (...skipping 1533 matching lines...) |
| 2177 // Result: | 2183 // Result: |
| 2178 // RCX: entry point. | 2184 // RCX: entry point. |
| 2179 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { | 2185 void StubCode::GenerateMegamorphicLookupStub(Assembler* assembler) { |
| 2180 EmitMegamorphicLookup(assembler, RDI, RBX, RCX); | 2186 EmitMegamorphicLookup(assembler, RDI, RBX, RCX); |
| 2181 __ ret(); | 2187 __ ret(); |
| 2182 } | 2188 } |
| 2183 | 2189 |
| 2184 } // namespace dart | 2190 } // namespace dart |
| 2185 | 2191 |
| 2186 #endif // defined TARGET_ARCH_X64 | 2192 #endif // defined TARGET_ARCH_X64 |
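
For reference, a minimal standalone sketch of the size computation described by the comment in the hunk above (`RoundedAllocationSize((array_length * kWordSize) + sizeof(RawArray))`), which the stub implements with a scaled `leaq` plus `fixed_size` followed by `andq` with `-kObjectAlignment`. The constants below (`kWordSize`, `kObjectAlignment`, `kArrayHeaderSize`) are assumed x64 placeholder values for illustration, not taken from the real VM headers.

```cpp
// Standalone sketch, NOT VM code: mirrors the stub's allocation-size math.
#include <cstdint>
#include <cstdio>

constexpr std::int64_t kWordSize = 8;          // assumed: 8-byte words on x64
constexpr std::int64_t kObjectAlignment = 16;  // assumed object alignment
constexpr std::int64_t kArrayHeaderSize = 24;  // assumed stand-in for sizeof(RawArray)

// Add the fixed header, then round up to the next alignment boundary.
// The stub folds (kObjectAlignment - 1) into fixed_size before masking,
// which is the same computation expressed in two instructions.
std::int64_t AllocationSizeForArray(std::int64_t array_length) {
  std::int64_t unrounded = array_length * kWordSize + kArrayHeaderSize;
  return (unrounded + kObjectAlignment - 1) & -kObjectAlignment;
}

int main() {
  // 3 elements: 3*8 + 24 = 48, already aligned, so 48 is returned.
  std::printf("%lld\n", static_cast<long long>(AllocationSizeForArray(3)));
  return 0;
}
```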