Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // NOLINT | 5 #include "vm/globals.h" // NOLINT |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/cpu.h" | 9 #include "vm/cpu.h" |
| 10 #include "vm/heap.h" | 10 #include "vm/heap.h" |
| 11 #include "vm/instructions.h" | 11 #include "vm/instructions.h" |
| 12 #include "vm/locations.h" | 12 #include "vm/locations.h" |
| 13 #include "vm/memory_region.h" | 13 #include "vm/memory_region.h" |
| 14 #include "vm/runtime_entry.h" | 14 #include "vm/runtime_entry.h" |
| 15 #include "vm/stack_frame.h" | 15 #include "vm/stack_frame.h" |
| 16 #include "vm/stub_code.h" | 16 #include "vm/stub_code.h" |
| 17 | 17 |
| 18 namespace dart { | 18 namespace dart { |
| 19 | 19 |
| 20 DEFINE_FLAG(bool, print_stop_message, true, "Print stop message."); | 20 DEFINE_FLAG(bool, print_stop_message, true, "Print stop message."); |
| 21 DECLARE_FLAG(bool, inline_alloc); | 21 DECLARE_FLAG(bool, inline_alloc); |
| 22 | 22 |
| 23 | 23 |
| 24 Assembler::Assembler(bool use_far_branches) | 24 Assembler::Assembler(bool use_far_branches) |
| 25 : buffer_(), | 25 : buffer_(), |
| 26 prologue_offset_(-1), | 26 prologue_offset_(-1), |
| 27 comments_(), | 27 comments_(), |
| 28 constant_pool_allowed_(true) { | 28 constant_pool_allowed_(false) { |
| 29 // Far branching mode is only needed and implemented for MIPS and ARM. | 29 // Far branching mode is only needed and implemented for MIPS and ARM. |
| 30 ASSERT(!use_far_branches); | 30 ASSERT(!use_far_branches); |
| 31 } | 31 } |
| 32 | 32 |
| 33 | 33 |
| 34 void Assembler::InitializeMemoryWithBreakpoints(uword data, intptr_t length) { | 34 void Assembler::InitializeMemoryWithBreakpoints(uword data, intptr_t length) { |
| 35 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); | 35 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); |
| 36 } | 36 } |
| 37 | 37 |
| 38 | 38 |
| (...skipping 17 matching lines...) | |
| 56 void Assembler::call(Label* label) { | 56 void Assembler::call(Label* label) { |
| 57 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 57 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 58 static const int kSize = 5; | 58 static const int kSize = 5; |
| 59 EmitUint8(0xE8); | 59 EmitUint8(0xE8); |
| 60 EmitLabel(label, kSize); | 60 EmitLabel(label, kSize); |
| 61 } | 61 } |
| 62 | 62 |
| 63 | 63 |
| 64 void Assembler::LoadExternalLabel(Register dst, | 64 void Assembler::LoadExternalLabel(Register dst, |
| 65 const ExternalLabel* label, | 65 const ExternalLabel* label, |
| 66 Patchability patchable, | 66 Patchability patchable) { |
| 67 Register pp) { | |
| 68 const int32_t offset = ObjectPool::element_offset( | 67 const int32_t offset = ObjectPool::element_offset( |
| 69 object_pool_wrapper_.FindExternalLabel(label, patchable)); | 68 object_pool_wrapper_.FindExternalLabel(label, patchable)); |
| 70 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); | 69 LoadWordFromPoolOffset(dst, offset - kHeapObjectTag); |
| 71 } | 70 } |
| 72 | 71 |
| 73 | 72 |
| 74 void Assembler::call(const ExternalLabel* label) { | 73 void Assembler::call(const ExternalLabel* label) { |
| 75 { // Encode movq(TMP, Immediate(label->address())), but always as imm64. | 74 { // Encode movq(TMP, Immediate(label->address())), but always as imm64. |
| 76 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 75 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 77 EmitRegisterREX(TMP, REX_W); | 76 EmitRegisterREX(TMP, REX_W); |
| 78 EmitUint8(0xB8 | (TMP & 7)); | 77 EmitUint8(0xB8 | (TMP & 7)); |
| 79 EmitInt64(label->address()); | 78 EmitInt64(label->address()); |
| 80 } | 79 } |
| 81 call(TMP); | 80 call(TMP); |
| 82 } | 81 } |
| 83 | 82 |
| 84 | 83 |
| 85 void Assembler::CallPatchable(const ExternalLabel* label) { | 84 void Assembler::CallPatchable(const ExternalLabel* label) { |
| 86 ASSERT(constant_pool_allowed()); | 85 ASSERT(constant_pool_allowed()); |
| 87 intptr_t call_start = buffer_.GetPosition(); | 86 intptr_t call_start = buffer_.GetPosition(); |
| 88 const int32_t offset = ObjectPool::element_offset( | 87 const int32_t offset = ObjectPool::element_offset( |
| 89 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); | 88 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); |
| 90 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); | 89 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); |
| 91 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); | 90 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); |
| 92 } | 91 } |
| 93 | 92 |
| 94 | 93 |
| 95 void Assembler::Call(const ExternalLabel* label, Register pp) { | 94 void Assembler::Call(const ExternalLabel* label) { |
| 95 ASSERT(constant_pool_allowed()); | |
| 96 const int32_t offset = ObjectPool::element_offset( | 96 const int32_t offset = ObjectPool::element_offset( |
| 97 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); | 97 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); |
| 98 call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); | 98 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); |
| 99 } | 99 } |
| 100 | 100 |
| 101 | 101 |
| 102 void Assembler::pushq(Register reg) { | 102 void Assembler::pushq(Register reg) { |
| 103 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 103 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 104 EmitRegisterREX(reg, REX_NONE); | 104 EmitRegisterREX(reg, REX_NONE); |
| 105 EmitUint8(0x50 | (reg & 7)); | 105 EmitUint8(0x50 | (reg & 7)); |
| 106 } | 106 } |
| 107 | 107 |
| 108 | 108 |
| (...skipping 10 matching lines...) | |
| 119 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 119 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 120 EmitUint8(0x68); | 120 EmitUint8(0x68); |
| 121 EmitImmediate(imm); | 121 EmitImmediate(imm); |
| 122 } else { | 122 } else { |
| 123 movq(TMP, imm); | 123 movq(TMP, imm); |
| 124 pushq(TMP); | 124 pushq(TMP); |
| 125 } | 125 } |
| 126 } | 126 } |
| 127 | 127 |
| 128 | 128 |
| 129 void Assembler::PushImmediate(const Immediate& imm, Register pp) { | 129 void Assembler::PushImmediate(const Immediate& imm) { |
| 130 if (CanLoadImmediateFromPool(imm, pp)) { | 130 if (imm.is_int32()) { |
| 131 LoadImmediate(TMP, imm, pp); | 131 pushq(imm); |
| 132 } else { | |
| 133 LoadImmediate(TMP, imm); | |
| 132 pushq(TMP); | 134 pushq(TMP); |
| 133 } else { | |
| 134 pushq(imm); | |
| 135 } | 135 } |
| 136 } | 136 } |
| 137 | 137 |
| 138 | 138 |
| 139 void Assembler::popq(Register reg) { | 139 void Assembler::popq(Register reg) { |
| 140 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 140 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 141 EmitRegisterREX(reg, REX_NONE); | 141 EmitRegisterREX(reg, REX_NONE); |
| 142 EmitUint8(0x58 | (reg & 7)); | 142 EmitUint8(0x58 | (reg & 7)); |
| 143 } | 143 } |
| 144 | 144 |
| (...skipping 630 matching lines...) | |
| 775 | 775 |
| 776 void Assembler::notps(XmmRegister dst) { | 776 void Assembler::notps(XmmRegister dst) { |
| 777 static const struct ALIGN16 { | 777 static const struct ALIGN16 { |
| 778 uint32_t a; | 778 uint32_t a; |
| 779 uint32_t b; | 779 uint32_t b; |
| 780 uint32_t c; | 780 uint32_t c; |
| 781 uint32_t d; | 781 uint32_t d; |
| 782 } float_not_constant = | 782 } float_not_constant = |
| 783 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; | 783 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; |
| 784 LoadImmediate( | 784 LoadImmediate( |
| 785 TMP, Immediate(reinterpret_cast<intptr_t>(&float_not_constant)), PP); | 785 TMP, Immediate(reinterpret_cast<intptr_t>(&float_not_constant))); |
| 786 xorps(dst, Address(TMP, 0)); | 786 xorps(dst, Address(TMP, 0)); |
| 787 } | 787 } |
| 788 | 788 |
| 789 | 789 |
| 790 void Assembler::negateps(XmmRegister dst) { | 790 void Assembler::negateps(XmmRegister dst) { |
| 791 static const struct ALIGN16 { | 791 static const struct ALIGN16 { |
| 792 uint32_t a; | 792 uint32_t a; |
| 793 uint32_t b; | 793 uint32_t b; |
| 794 uint32_t c; | 794 uint32_t c; |
| 795 uint32_t d; | 795 uint32_t d; |
| 796 } float_negate_constant = | 796 } float_negate_constant = |
| 797 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 }; | 797 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 }; |
| 798 LoadImmediate( | 798 LoadImmediate( |
| 799 TMP, Immediate(reinterpret_cast<intptr_t>(&float_negate_constant)), PP); | 799 TMP, Immediate(reinterpret_cast<intptr_t>(&float_negate_constant))); |
| 800 xorps(dst, Address(TMP, 0)); | 800 xorps(dst, Address(TMP, 0)); |
| 801 } | 801 } |
| 802 | 802 |
| 803 | 803 |
| 804 void Assembler::absps(XmmRegister dst) { | 804 void Assembler::absps(XmmRegister dst) { |
| 805 static const struct ALIGN16 { | 805 static const struct ALIGN16 { |
| 806 uint32_t a; | 806 uint32_t a; |
| 807 uint32_t b; | 807 uint32_t b; |
| 808 uint32_t c; | 808 uint32_t c; |
| 809 uint32_t d; | 809 uint32_t d; |
| 810 } float_absolute_constant = | 810 } float_absolute_constant = |
| 811 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF }; | 811 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF }; |
| 812 LoadImmediate( | 812 LoadImmediate( |
| 813 TMP, Immediate(reinterpret_cast<intptr_t>(&float_absolute_constant)), PP); | 813 TMP, Immediate(reinterpret_cast<intptr_t>(&float_absolute_constant))); |
| 814 andps(dst, Address(TMP, 0)); | 814 andps(dst, Address(TMP, 0)); |
| 815 } | 815 } |
| 816 | 816 |
| 817 | 817 |
| 818 void Assembler::zerowps(XmmRegister dst) { | 818 void Assembler::zerowps(XmmRegister dst) { |
| 819 static const struct ALIGN16 { | 819 static const struct ALIGN16 { |
| 820 uint32_t a; | 820 uint32_t a; |
| 821 uint32_t b; | 821 uint32_t b; |
| 822 uint32_t c; | 822 uint32_t c; |
| 823 uint32_t d; | 823 uint32_t d; |
| 824 } float_zerow_constant = | 824 } float_zerow_constant = |
| 825 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 }; | 825 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 }; |
| 826 LoadImmediate( | 826 LoadImmediate( |
| 827 TMP, Immediate(reinterpret_cast<intptr_t>(&float_zerow_constant)), PP); | 827 TMP, Immediate(reinterpret_cast<intptr_t>(&float_zerow_constant))); |
| 828 andps(dst, Address(TMP, 0)); | 828 andps(dst, Address(TMP, 0)); |
| 829 } | 829 } |
| 830 | 830 |
| 831 | 831 |
| 832 void Assembler::cmppseq(XmmRegister dst, XmmRegister src) { | 832 void Assembler::cmppseq(XmmRegister dst, XmmRegister src) { |
| 833 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 833 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 834 EmitREX_RB(dst, src); | 834 EmitREX_RB(dst, src); |
| 835 EmitUint8(0x0F); | 835 EmitUint8(0x0F); |
| 836 EmitUint8(0xC2); | 836 EmitUint8(0xC2); |
| 837 EmitXmmRegisterOperand(dst & 7, src); | 837 EmitXmmRegisterOperand(dst & 7, src); |
| (...skipping 167 matching lines...) | |
| 1005 } | 1005 } |
| 1006 | 1006 |
| 1007 | 1007 |
| 1008 void Assembler::negatepd(XmmRegister dst) { | 1008 void Assembler::negatepd(XmmRegister dst) { |
| 1009 static const struct ALIGN16 { | 1009 static const struct ALIGN16 { |
| 1010 uint64_t a; | 1010 uint64_t a; |
| 1011 uint64_t b; | 1011 uint64_t b; |
| 1012 } double_negate_constant = | 1012 } double_negate_constant = |
| 1013 { 0x8000000000000000LL, 0x8000000000000000LL }; | 1013 { 0x8000000000000000LL, 0x8000000000000000LL }; |
| 1014 LoadImmediate( | 1014 LoadImmediate( |
| 1015 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)), PP); | 1015 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant))); |
| 1016 xorpd(dst, Address(TMP, 0)); | 1016 xorpd(dst, Address(TMP, 0)); |
| 1017 } | 1017 } |
| 1018 | 1018 |
| 1019 | 1019 |
| 1020 void Assembler::subpd(XmmRegister dst, XmmRegister src) { | 1020 void Assembler::subpd(XmmRegister dst, XmmRegister src) { |
| 1021 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1021 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1022 ASSERT(src <= XMM15); | 1022 ASSERT(src <= XMM15); |
| 1023 ASSERT(dst <= XMM15); | 1023 ASSERT(dst <= XMM15); |
| 1024 EmitUint8(0x66); | 1024 EmitUint8(0x66); |
| 1025 EmitREX_RB(dst, src); | 1025 EmitREX_RB(dst, src); |
| (...skipping 27 matching lines...) | |
| 1053 } | 1053 } |
| 1054 | 1054 |
| 1055 | 1055 |
| 1056 void Assembler::abspd(XmmRegister dst) { | 1056 void Assembler::abspd(XmmRegister dst) { |
| 1057 static const struct ALIGN16 { | 1057 static const struct ALIGN16 { |
| 1058 uint64_t a; | 1058 uint64_t a; |
| 1059 uint64_t b; | 1059 uint64_t b; |
| 1060 } double_absolute_const = | 1060 } double_absolute_const = |
| 1061 { 0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL }; | 1061 { 0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL }; |
| 1062 LoadImmediate( | 1062 LoadImmediate( |
| 1063 TMP, Immediate(reinterpret_cast<intptr_t>(&double_absolute_const)), PP); | 1063 TMP, Immediate(reinterpret_cast<intptr_t>(&double_absolute_const))); |
| 1064 andpd(dst, Address(TMP, 0)); | 1064 andpd(dst, Address(TMP, 0)); |
| 1065 } | 1065 } |
| 1066 | 1066 |
| 1067 | 1067 |
| 1068 void Assembler::minpd(XmmRegister dst, XmmRegister src) { | 1068 void Assembler::minpd(XmmRegister dst, XmmRegister src) { |
| 1069 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1069 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1070 ASSERT(src <= XMM15); | 1070 ASSERT(src <= XMM15); |
| 1071 ASSERT(dst <= XMM15); | 1071 ASSERT(dst <= XMM15); |
| 1072 EmitUint8(0x66); | 1072 EmitUint8(0x66); |
| 1073 EmitREX_RB(dst, src); | 1073 EmitREX_RB(dst, src); |
| (...skipping 415 matching lines...) | |
| 1489 | 1489 |
| 1490 | 1490 |
| 1491 void Assembler::cmpq(Register reg, const Address& address) { | 1491 void Assembler::cmpq(Register reg, const Address& address) { |
| 1492 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1492 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1493 EmitOperandREX(reg, address, REX_W); | 1493 EmitOperandREX(reg, address, REX_W); |
| 1494 EmitUint8(0x3B); | 1494 EmitUint8(0x3B); |
| 1495 EmitOperand(reg & 7, address); | 1495 EmitOperand(reg & 7, address); |
| 1496 } | 1496 } |
| 1497 | 1497 |
| 1498 | 1498 |
| 1499 void Assembler::CompareImmediate(Register reg, const Immediate& imm, | 1499 void Assembler::CompareImmediate(Register reg, const Immediate& imm) { |
| 1500 Register pp) { | 1500 if (imm.is_int32()) { |
| 1501 if (CanLoadImmediateFromPool(imm, pp)) { | 1501 cmpq(reg, imm); |
| 1502 LoadImmediate(TMP, imm, pp); | 1502 } else { |
| 1503 LoadImmediate(TMP, imm); | |
zra 2015/07/31 17:37:28: assert reg != TMP
regis 2015/07/31 19:33:59: Done here and at a bunch of other places.
| 1503 cmpq(reg, TMP); | 1504 cmpq(reg, TMP); |
| 1504 } else { | |
| 1505 cmpq(reg, imm); | |
| 1506 } | 1505 } |
| 1507 } | 1506 } |
| 1508 | 1507 |
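For readability, a compact sketch of how CompareImmediate(Register, const Immediate&) reads once the reviewer's ASSERT(reg != TMP) request above is applied. This is an illustration assembled from the hunk shown here, not the landed file:

```cpp
// Sketch only: with the pool-pointer argument gone, an immediate that does
// not fit in 32 bits is materialized in TMP, so the compared register must
// not itself be TMP.
void Assembler::CompareImmediate(Register reg, const Immediate& imm) {
  if (imm.is_int32()) {
    cmpq(reg, imm);           // Fits the instruction's imm32 field directly.
  } else {
    ASSERT(reg != TMP);       // LoadImmediate below clobbers TMP.
    LoadImmediate(TMP, imm);  // Loads via movq or the object pool.
    cmpq(reg, TMP);
  }
}
```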
| 1509 | 1508 |
| 1510 void Assembler::CompareImmediate(const Address& address, const Immediate& imm, | 1509 void Assembler::CompareImmediate(const Address& address, const Immediate& imm) { |
| 1511 Register pp) { | 1510 if (imm.is_int32()) { |
| 1512 if (CanLoadImmediateFromPool(imm, pp)) { | 1511 cmpq(address, imm); |
| 1513 LoadImmediate(TMP, imm, pp); | 1512 } else { |
| 1513 LoadImmediate(TMP, imm); | |
zra 2015/07/31 17:37:28: Could address refer to TMP?
regis 2015/07/31 19:33:59: Since I did not change this, we would know by now
| 1514 cmpq(address, TMP); | 1514 cmpq(address, TMP); |
| 1515 } else { | |
| 1516 cmpq(address, imm); | |
| 1517 } | 1515 } |
| 1518 } | 1516 } |
| 1519 | 1517 |
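Similarly, a sketch of the Address overload under discussion; the reviewer's question is whether `address` could be computed from TMP, which the non-int32 path clobbers (an illustration of the new side of the hunk, not the landed file):

```cpp
// Sketch only: callers must not base `address` on TMP, because the
// non-int32 path overwrites TMP before the comparison executes.
void Assembler::CompareImmediate(const Address& address, const Immediate& imm) {
  if (imm.is_int32()) {
    cmpq(address, imm);       // Directly encodable imm32 operand.
  } else {
    LoadImmediate(TMP, imm);  // Clobbers TMP.
    cmpq(address, TMP);
  }
}
```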
| 1520 | 1518 |
| 1521 void Assembler::testl(Register reg1, Register reg2) { | 1519 void Assembler::testl(Register reg1, Register reg2) { |
| 1522 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1520 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1523 Operand operand(reg2); | 1521 Operand operand(reg2); |
| 1524 EmitOperandREX(reg1, operand, REX_NONE); | 1522 EmitOperandREX(reg1, operand, REX_NONE); |
| 1525 EmitUint8(0x85); | 1523 EmitUint8(0x85); |
| 1526 EmitOperand(reg1 & 7, operand); | 1524 EmitOperand(reg1 & 7, operand); |
| (...skipping 74 matching lines...) | |
| 1601 } else { | 1599 } else { |
| 1602 EmitRegisterREX(reg, REX_W); | 1600 EmitRegisterREX(reg, REX_W); |
| 1603 EmitUint8(0xF7); | 1601 EmitUint8(0xF7); |
| 1604 EmitUint8(0xC0 | (reg & 7)); | 1602 EmitUint8(0xC0 | (reg & 7)); |
| 1605 } | 1603 } |
| 1606 EmitImmediate(imm); | 1604 EmitImmediate(imm); |
| 1607 } | 1605 } |
| 1608 } | 1606 } |
| 1609 | 1607 |
| 1610 | 1608 |
| 1611 void Assembler::TestImmediate(Register dst, const Immediate& imm, Register pp) { | 1609 void Assembler::TestImmediate(Register dst, const Immediate& imm) { |
| 1612 if (CanLoadImmediateFromPool(imm, pp)) { | 1610 if (imm.is_int32()) { |
| 1611 testq(dst, imm); | |
| 1612 } else { | |
| 1613 ASSERT(dst != TMP); | 1613 ASSERT(dst != TMP); |
| 1614 LoadImmediate(TMP, imm, pp); | 1614 LoadImmediate(TMP, imm); |
| 1615 testq(dst, TMP); | 1615 testq(dst, TMP); |
| 1616 } else { | |
| 1617 testq(dst, imm); | |
| 1618 } | 1616 } |
| 1619 } | 1617 } |
| 1620 | 1618 |
| 1621 | 1619 |
| 1622 void Assembler::andl(Register dst, Register src) { | 1620 void Assembler::andl(Register dst, Register src) { |
| 1623 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1621 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1624 Operand operand(src); | 1622 Operand operand(src); |
| 1625 EmitOperandREX(dst, operand, REX_NONE); | 1623 EmitOperandREX(dst, operand, REX_NONE); |
| 1626 EmitUint8(0x23); | 1624 EmitUint8(0x23); |
| 1627 EmitOperand(dst & 7, operand); | 1625 EmitOperand(dst & 7, operand); |
| (...skipping 62 matching lines...) | |
| 1690 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1688 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1691 EmitRegisterREX(dst, REX_W); | 1689 EmitRegisterREX(dst, REX_W); |
| 1692 EmitComplex(4, Operand(dst), imm); | 1690 EmitComplex(4, Operand(dst), imm); |
| 1693 } else { | 1691 } else { |
| 1694 movq(TMP, imm); | 1692 movq(TMP, imm); |
| 1695 andq(dst, TMP); | 1693 andq(dst, TMP); |
| 1696 } | 1694 } |
| 1697 } | 1695 } |
| 1698 | 1696 |
| 1699 | 1697 |
| 1700 void Assembler::AndImmediate(Register dst, const Immediate& imm, Register pp) { | 1698 void Assembler::AndImmediate(Register dst, const Immediate& imm) { |
| 1701 if (CanLoadImmediateFromPool(imm, pp)) { | 1699 if (imm.is_int32()) { |
| 1700 andq(dst, imm); | |
| 1701 } else { | |
| 1702 ASSERT(dst != TMP); | 1702 ASSERT(dst != TMP); |
| 1703 LoadImmediate(TMP, imm, pp); | 1703 LoadImmediate(TMP, imm); |
| 1704 andq(dst, TMP); | 1704 andq(dst, TMP); |
| 1705 } else { | |
| 1706 andq(dst, imm); | |
| 1707 } | 1705 } |
| 1708 } | 1706 } |
| 1709 | 1707 |
| 1710 | 1708 |
| 1711 void Assembler::orq(Register dst, Register src) { | 1709 void Assembler::orq(Register dst, Register src) { |
| 1712 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1710 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1713 Operand operand(src); | 1711 Operand operand(src); |
| 1714 EmitOperandREX(dst, operand, REX_W); | 1712 EmitOperandREX(dst, operand, REX_W); |
| 1715 EmitUint8(0x0B); | 1713 EmitUint8(0x0B); |
| 1716 EmitOperand(dst & 7, operand); | 1714 EmitOperand(dst & 7, operand); |
| (...skipping 13 matching lines...) | |
| 1730 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1728 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1731 EmitRegisterREX(dst, REX_W); | 1729 EmitRegisterREX(dst, REX_W); |
| 1732 EmitComplex(1, Operand(dst), imm); | 1730 EmitComplex(1, Operand(dst), imm); |
| 1733 } else { | 1731 } else { |
| 1734 movq(TMP, imm); | 1732 movq(TMP, imm); |
| 1735 orq(dst, TMP); | 1733 orq(dst, TMP); |
| 1736 } | 1734 } |
| 1737 } | 1735 } |
| 1738 | 1736 |
| 1739 | 1737 |
| 1740 void Assembler::OrImmediate(Register dst, const Immediate& imm, Register pp) { | 1738 void Assembler::OrImmediate(Register dst, const Immediate& imm) { |
| 1741 if (CanLoadImmediateFromPool(imm, pp)) { | 1739 if (imm.is_int32()) { |
| 1740 orq(dst, imm); | |
| 1741 } else { | |
| 1742 ASSERT(dst != TMP); | 1742 ASSERT(dst != TMP); |
| 1743 LoadImmediate(TMP, imm, pp); | 1743 LoadImmediate(TMP, imm); |
| 1744 orq(dst, TMP); | 1744 orq(dst, TMP); |
| 1745 } else { | |
| 1746 orq(dst, imm); | |
| 1747 } | 1745 } |
| 1748 } | 1746 } |
| 1749 | 1747 |
| 1750 | 1748 |
| 1751 void Assembler::xorq(Register dst, Register src) { | 1749 void Assembler::xorq(Register dst, Register src) { |
| 1752 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1750 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1753 Operand operand(src); | 1751 Operand operand(src); |
| 1754 EmitOperandREX(dst, operand, REX_W); | 1752 EmitOperandREX(dst, operand, REX_W); |
| 1755 EmitUint8(0x33); | 1753 EmitUint8(0x33); |
| 1756 EmitOperand(dst & 7, operand); | 1754 EmitOperand(dst & 7, operand); |
| (...skipping 21 matching lines...) | |
| 1778 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1776 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1779 EmitRegisterREX(dst, REX_W); | 1777 EmitRegisterREX(dst, REX_W); |
| 1780 EmitComplex(6, Operand(dst), imm); | 1778 EmitComplex(6, Operand(dst), imm); |
| 1781 } else { | 1779 } else { |
| 1782 movq(TMP, imm); | 1780 movq(TMP, imm); |
| 1783 xorq(dst, TMP); | 1781 xorq(dst, TMP); |
| 1784 } | 1782 } |
| 1785 } | 1783 } |
| 1786 | 1784 |
| 1787 | 1785 |
| 1788 void Assembler::XorImmediate(Register dst, const Immediate& imm, Register pp) { | 1786 void Assembler::XorImmediate(Register dst, const Immediate& imm) { |
| 1789 if (CanLoadImmediateFromPool(imm, pp)) { | 1787 if (imm.is_int32()) { |
| 1788 xorq(dst, imm); | |
| 1789 } else { | |
| 1790 ASSERT(dst != TMP); | 1790 ASSERT(dst != TMP); |
| 1791 LoadImmediate(TMP, imm, pp); | 1791 LoadImmediate(TMP, imm); |
| 1792 xorq(dst, TMP); | 1792 xorq(dst, TMP); |
| 1793 } else { | |
| 1794 xorq(dst, imm); | |
| 1795 } | 1793 } |
| 1796 } | 1794 } |
| 1797 | 1795 |
| 1798 | 1796 |
| 1799 void Assembler::addl(Register dst, Register src) { | 1797 void Assembler::addl(Register dst, Register src) { |
| 1800 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 1798 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 1801 Operand operand(src); | 1799 Operand operand(src); |
| 1802 EmitOperandREX(dst, operand, REX_NONE); | 1800 EmitOperandREX(dst, operand, REX_NONE); |
| 1803 EmitUint8(0x03); | 1801 EmitUint8(0x03); |
| 1804 EmitOperand(dst & 7, operand); | 1802 EmitOperand(dst & 7, operand); |
| (...skipping 268 matching lines...) | |
| 2073 EmitUint8(0x69); | 2071 EmitUint8(0x69); |
| 2074 EmitOperand(reg & 7, Operand(reg)); | 2072 EmitOperand(reg & 7, Operand(reg)); |
| 2075 EmitImmediate(imm); | 2073 EmitImmediate(imm); |
| 2076 } else { | 2074 } else { |
| 2077 movq(TMP, imm); | 2075 movq(TMP, imm); |
| 2078 imulq(reg, TMP); | 2076 imulq(reg, TMP); |
| 2079 } | 2077 } |
| 2080 } | 2078 } |
| 2081 | 2079 |
| 2082 | 2080 |
| 2083 void Assembler::MulImmediate(Register reg, const Immediate& imm, Register pp) { | 2081 void Assembler::MulImmediate(Register reg, const Immediate& imm) { |
| 2084 if (CanLoadImmediateFromPool(imm, pp)) { | 2082 if (imm.is_int32()) { |
| 2083 imulq(reg, imm); | |
| 2084 } else { | |
| 2085 ASSERT(reg != TMP); | 2085 ASSERT(reg != TMP); |
| 2086 LoadImmediate(TMP, imm, pp); | 2086 LoadImmediate(TMP, imm); |
| 2087 imulq(reg, TMP); | 2087 imulq(reg, TMP); |
| 2088 } else { | |
| 2089 imulq(reg, imm); | |
| 2090 } | 2088 } |
| 2091 } | 2089 } |
| 2092 | 2090 |
| 2093 | 2091 |
| 2094 void Assembler::imulq(Register dst, const Address& address) { | 2092 void Assembler::imulq(Register dst, const Address& address) { |
| 2095 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2093 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 2096 EmitOperandREX(dst, address, REX_W); | 2094 EmitOperandREX(dst, address, REX_W); |
| 2097 EmitUint8(0x0F); | 2095 EmitUint8(0x0F); |
| 2098 EmitUint8(0xAF); | 2096 EmitUint8(0xAF); |
| 2099 EmitOperand(dst & 7, address); | 2097 EmitOperand(dst & 7, address); |
| (...skipping 492 matching lines...) | |
| 2592 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2590 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 2593 EmitRegisterREX(TMP, REX_W); | 2591 EmitRegisterREX(TMP, REX_W); |
| 2594 EmitUint8(0xB8 | (TMP & 7)); | 2592 EmitUint8(0xB8 | (TMP & 7)); |
| 2595 EmitInt64(label->address()); | 2593 EmitInt64(label->address()); |
| 2596 } | 2594 } |
| 2597 jmp(TMP); | 2595 jmp(TMP); |
| 2598 } | 2596 } |
| 2599 | 2597 |
| 2600 | 2598 |
| 2601 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { | 2599 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { |
| 2602 ASSERT(constant_pool_allowed()); | 2600 ASSERT((pp != PP) || constant_pool_allowed()); |
| 2603 intptr_t call_start = buffer_.GetPosition(); | 2601 intptr_t call_start = buffer_.GetPosition(); |
| 2604 const int32_t offset = ObjectPool::element_offset( | 2602 const int32_t offset = ObjectPool::element_offset( |
| 2605 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); | 2603 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); |
| 2606 // Patchable jumps always use a 32-bit immediate encoding. | 2604 // Patchable jumps always use a 32-bit immediate encoding. |
| 2607 jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); | 2605 jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
| 2608 ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes); | 2606 ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes); |
| 2609 } | 2607 } |
| 2610 | 2608 |
| 2611 | 2609 |
| 2612 void Assembler::Jmp(const ExternalLabel* label, Register pp) { | 2610 void Assembler::Jmp(const ExternalLabel* label, Register pp) { |
| 2611 ASSERT((pp != PP) || constant_pool_allowed()); | |
| 2613 const int32_t offset = ObjectPool::element_offset( | 2612 const int32_t offset = ObjectPool::element_offset( |
| 2614 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); | 2613 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); |
| 2615 jmp(Address(pp, offset - kHeapObjectTag)); | 2614 jmp(Address(pp, offset - kHeapObjectTag)); |
| 2616 } | 2615 } |
| 2617 | 2616 |
| 2618 | 2617 |
| 2619 void Assembler::lock() { | 2618 void Assembler::lock() { |
| 2620 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2619 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 2621 EmitUint8(0xF0); | 2620 EmitUint8(0xF0); |
| 2622 } | 2621 } |
| (...skipping 34 matching lines...) | |
| 2657 movq(to, from); | 2656 movq(to, from); |
| 2658 } | 2657 } |
| 2659 } | 2658 } |
| 2660 | 2659 |
| 2661 | 2660 |
| 2662 void Assembler::PopRegister(Register r) { | 2661 void Assembler::PopRegister(Register r) { |
| 2663 popq(r); | 2662 popq(r); |
| 2664 } | 2663 } |
| 2665 | 2664 |
| 2666 | 2665 |
| 2667 void Assembler::AddImmediate(Register reg, const Immediate& imm, Register pp) { | 2666 void Assembler::AddImmediate(Register reg, const Immediate& imm) { |
| 2668 const int64_t value = imm.value(); | 2667 const int64_t value = imm.value(); |
| 2669 if (value == 0) { | 2668 if (value == 0) { |
| 2670 return; | 2669 return; |
| 2671 } | 2670 } |
| 2672 if ((value > 0) || (value == kMinInt64)) { | 2671 if ((value > 0) || (value == kMinInt64)) { |
| 2673 if (value == 1) { | 2672 if (value == 1) { |
| 2674 incq(reg); | 2673 incq(reg); |
| 2675 } else { | 2674 } else { |
| 2676 if (CanLoadImmediateFromPool(imm, pp)) { | 2675 if (imm.is_int32()) { |
| 2676 addq(reg, imm); | |
| 2677 } else { | |
| 2677 ASSERT(reg != TMP); | 2678 ASSERT(reg != TMP); |
| 2678 LoadImmediate(TMP, imm, pp); | 2679 LoadImmediate(TMP, imm); |
| 2679 addq(reg, TMP); | 2680 addq(reg, TMP); |
| 2680 } else { | |
| 2681 addq(reg, imm); | |
| 2682 } | 2681 } |
| 2683 } | 2682 } |
| 2684 } else { | 2683 } else { |
| 2685 SubImmediate(reg, Immediate(-value), pp); | 2684 SubImmediate(reg, Immediate(-value)); |
| 2686 } | 2685 } |
| 2687 } | 2686 } |
| 2688 | 2687 |
| 2689 | 2688 |
| 2690 void Assembler::AddImmediate(const Address& address, const Immediate& imm, | 2689 void Assembler::AddImmediate(const Address& address, const Immediate& imm) { |
| 2691 Register pp) { | |
| 2692 const int64_t value = imm.value(); | 2690 const int64_t value = imm.value(); |
| 2693 if (value == 0) { | 2691 if (value == 0) { |
| 2694 return; | 2692 return; |
| 2695 } | 2693 } |
| 2696 if ((value > 0) || (value == kMinInt64)) { | 2694 if ((value > 0) || (value == kMinInt64)) { |
| 2697 if (value == 1) { | 2695 if (value == 1) { |
| 2698 incq(address); | 2696 incq(address); |
| 2699 } else { | 2697 } else { |
| 2700 if (CanLoadImmediateFromPool(imm, pp)) { | 2698 if (imm.is_int32()) { |
| 2701 LoadImmediate(TMP, imm, pp); | 2699 addq(address, imm); |
| 2700 } else { | |
| 2701 LoadImmediate(TMP, imm); | |
| 2702 addq(address, TMP); | 2702 addq(address, TMP); |
| 2703 } else { | |
| 2704 addq(address, imm); | |
| 2705 } | 2703 } |
| 2706 } | 2704 } |
| 2707 } else { | 2705 } else { |
| 2708 SubImmediate(address, Immediate(-value), pp); | 2706 SubImmediate(address, Immediate(-value)); |
| 2709 } | 2707 } |
| 2710 } | 2708 } |
| 2711 | 2709 |
| 2712 | 2710 |
| 2713 void Assembler::SubImmediate(Register reg, const Immediate& imm, Register pp) { | 2711 void Assembler::SubImmediate(Register reg, const Immediate& imm) { |
| 2714 const int64_t value = imm.value(); | 2712 const int64_t value = imm.value(); |
| 2715 if (value == 0) { | 2713 if (value == 0) { |
| 2716 return; | 2714 return; |
| 2717 } | 2715 } |
| 2718 if ((value > 0) || (value == kMinInt64)) { | 2716 if ((value > 0) || (value == kMinInt64)) { |
| 2719 if (value == 1) { | 2717 if (value == 1) { |
| 2720 decq(reg); | 2718 decq(reg); |
| 2721 } else { | 2719 } else { |
| 2722 if (CanLoadImmediateFromPool(imm, pp)) { | 2720 if (imm.is_int32()) { |
| 2721 subq(reg, imm); | |
| 2722 } else { | |
| 2723 ASSERT(reg != TMP); | 2723 ASSERT(reg != TMP); |
| 2724 LoadImmediate(TMP, imm, pp); | 2724 LoadImmediate(TMP, imm); |
| 2725 subq(reg, TMP); | 2725 subq(reg, TMP); |
| 2726 } else { | |
| 2727 subq(reg, imm); | |
| 2728 } | 2726 } |
| 2729 } | 2727 } |
| 2730 } else { | 2728 } else { |
| 2731 AddImmediate(reg, Immediate(-value), pp); | 2729 AddImmediate(reg, Immediate(-value)); |
| 2732 } | 2730 } |
| 2733 } | 2731 } |
| 2734 | 2732 |
| 2735 | 2733 |
| 2736 void Assembler::SubImmediate(const Address& address, const Immediate& imm, | 2734 void Assembler::SubImmediate(const Address& address, const Immediate& imm) { |
| 2737 Register pp) { | |
| 2738 const int64_t value = imm.value(); | 2735 const int64_t value = imm.value(); |
| 2739 if (value == 0) { | 2736 if (value == 0) { |
| 2740 return; | 2737 return; |
| 2741 } | 2738 } |
| 2742 if ((value > 0) || (value == kMinInt64)) { | 2739 if ((value > 0) || (value == kMinInt64)) { |
| 2743 if (value == 1) { | 2740 if (value == 1) { |
| 2744 decq(address); | 2741 decq(address); |
| 2745 } else { | 2742 } else { |
| 2746 if (CanLoadImmediateFromPool(imm, pp)) { | 2743 if (imm.is_int32()) { |
| 2747 LoadImmediate(TMP, imm, pp); | 2744 subq(address, imm); |
| 2745 } else { | |
| 2746 LoadImmediate(TMP, imm); | |
| 2748 subq(address, TMP); | 2747 subq(address, TMP); |
| 2749 } else { | |
| 2750 subq(address, imm); | |
| 2751 } | 2748 } |
| 2752 } | 2749 } |
| 2753 } else { | 2750 } else { |
| 2754 AddImmediate(address, Immediate(-value), pp); | 2751 AddImmediate(address, Immediate(-value)); |
| 2755 } | 2752 } |
| 2756 } | 2753 } |
| 2757 | 2754 |
| 2758 | 2755 |
| 2759 void Assembler::Drop(intptr_t stack_elements, Register tmp) { | 2756 void Assembler::Drop(intptr_t stack_elements, Register tmp) { |
| 2760 ASSERT(stack_elements >= 0); | 2757 ASSERT(stack_elements >= 0); |
| 2761 if (stack_elements <= 4) { | 2758 if (stack_elements <= 4) { |
| 2762 for (intptr_t i = 0; i < stack_elements; i++) { | 2759 for (intptr_t i = 0; i < stack_elements; i++) { |
| 2763 popq(tmp); | 2760 popq(tmp); |
| 2764 } | 2761 } |
| (...skipping 15 matching lines...) | |
| 2780 // If the raw smi does not fit into a 32-bit signed int, then we'll keep | 2777 // If the raw smi does not fit into a 32-bit signed int, then we'll keep |
| 2781 // the raw value in the object pool. | 2778 // the raw value in the object pool. |
| 2782 return !Utils::IsInt(32, reinterpret_cast<int64_t>(object.raw())); | 2779 return !Utils::IsInt(32, reinterpret_cast<int64_t>(object.raw())); |
| 2783 } | 2780 } |
| 2784 ASSERT(object.IsNotTemporaryScopedHandle()); | 2781 ASSERT(object.IsNotTemporaryScopedHandle()); |
| 2785 ASSERT(object.IsOld()); | 2782 ASSERT(object.IsOld()); |
| 2786 return true; | 2783 return true; |
| 2787 } | 2784 } |
| 2788 | 2785 |
| 2789 | 2786 |
| 2790 void Assembler::LoadWordFromPoolOffset(Register dst, Register pp, | 2787 void Assembler::LoadWordFromPoolOffset(Register dst, int32_t offset) { |
| 2791 int32_t offset) { | 2788 ASSERT(constant_pool_allowed()); |
| 2789 ASSERT(dst != PP); | |
| 2792 // This sequence must be of fixed size. AddressBaseImm32 | 2790 // This sequence must be of fixed size. AddressBaseImm32 |
| 2793 // forces the address operand to use a fixed-size imm32 encoding. | 2791 // forces the address operand to use a fixed-size imm32 encoding. |
| 2794 movq(dst, Address::AddressBaseImm32(pp, offset)); | 2792 movq(dst, Address::AddressBaseImm32(PP, offset)); |
| 2795 } | 2793 } |
| 2796 | 2794 |
| 2797 | 2795 |
| 2798 void Assembler::LoadIsolate(Register dst) { | 2796 void Assembler::LoadIsolate(Register dst) { |
| 2799 movq(dst, Address(THR, Thread::isolate_offset())); | 2797 movq(dst, Address(THR, Thread::isolate_offset())); |
| 2800 } | 2798 } |
| 2801 | 2799 |
| 2802 | 2800 |
| 2803 void Assembler::LoadObjectHelper(Register dst, | 2801 void Assembler::LoadObjectHelper(Register dst, |
| 2804 const Object& object, | 2802 const Object& object, |
| 2805 Register pp, | |
| 2806 bool is_unique) { | 2803 bool is_unique) { |
| 2807 if (Thread::CanLoadFromThread(object)) { | 2804 if (Thread::CanLoadFromThread(object)) { |
| 2808 movq(dst, Address(THR, Thread::OffsetFromThread(object))); | 2805 movq(dst, Address(THR, Thread::OffsetFromThread(object))); |
| 2809 } else if (CanLoadFromObjectPool(object)) { | 2806 } else if (CanLoadFromObjectPool(object)) { |
| 2810 const int32_t offset = ObjectPool::element_offset( | 2807 const int32_t offset = ObjectPool::element_offset( |
| 2811 is_unique ? object_pool_wrapper_.AddObject(object) | 2808 is_unique ? object_pool_wrapper_.AddObject(object) |
| 2812 : object_pool_wrapper_.FindObject(object)); | 2809 : object_pool_wrapper_.FindObject(object)); |
| 2813 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); | 2810 LoadWordFromPoolOffset(dst, offset - kHeapObjectTag); |
| 2814 } else { | 2811 } else { |
| 2815 ASSERT(object.IsSmi() || object.InVMHeap()); | 2812 ASSERT(object.IsSmi() || object.InVMHeap()); |
| 2816 LoadImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); | 2813 LoadImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
| 2817 } | 2814 } |
| 2818 } | 2815 } |
| 2819 | 2816 |
| 2820 | 2817 |
| 2821 void Assembler::LoadObject(Register dst, const Object& object, Register pp) { | 2818 void Assembler::LoadFunctionFromNewPool(Register dst, |
zra 2015/07/31 17:37:28: Maybe FromCalleePool instead of FromNewPool.
regis 2015/07/31 19:33:59: Done.
| 2822 LoadObjectHelper(dst, object, pp, false); | 2819 const Function& function, |
| 2820 Register new_pp) { | |
| 2821 ASSERT(!constant_pool_allowed()); | |
| 2822 ASSERT(new_pp != PP); | |
| 2823 const int32_t offset = | |
| 2824 ObjectPool::element_offset(object_pool_wrapper_.FindObject(function)); | |
| 2825 movq(dst, Address::AddressBaseImm32(new_pp, offset - kHeapObjectTag)); | |
| 2823 } | 2826 } |
| 2824 | 2827 |
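A sketch of the helper introduced above, written with the FromCalleePool name the reviewer suggested and the author accepted; the exact name in the landed patch is assumed here, while the body follows the new side of the hunk:

```cpp
// Sketch only: loads `function` from a callee's object pool (new_pp) while
// the current code still runs against the caller's pool, so use of PP is
// explicitly disallowed and new_pp must differ from PP.
void Assembler::LoadFunctionFromCalleePool(Register dst,
                                           const Function& function,
                                           Register new_pp) {
  ASSERT(!constant_pool_allowed());
  ASSERT(new_pp != PP);
  const int32_t offset =
      ObjectPool::element_offset(object_pool_wrapper_.FindObject(function));
  movq(dst, Address::AddressBaseImm32(new_pp, offset - kHeapObjectTag));
}
```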
| 2825 | 2828 |
| 2826 void Assembler::LoadUniqueObject(Register dst, | 2829 void Assembler::LoadObject(Register dst, const Object& object) { |
| 2827 const Object& object, | 2830 LoadObjectHelper(dst, object, false); |
| 2828 Register pp) { | |
| 2829 LoadObjectHelper(dst, object, pp, true); | |
| 2830 } | 2831 } |
| 2831 | 2832 |
| 2832 | 2833 |
| 2833 void Assembler::StoreObject(const Address& dst, const Object& object, | 2834 void Assembler::LoadUniqueObject(Register dst, const Object& object) { |
| 2834 Register pp) { | 2835 LoadObjectHelper(dst, object, true); |
| 2836 } | |
| 2837 | |
| 2838 | |
| 2839 void Assembler::StoreObject(const Address& dst, const Object& object) { | |
| 2835 if (Thread::CanLoadFromThread(object)) { | 2840 if (Thread::CanLoadFromThread(object)) { |
| 2836 movq(TMP, Address(THR, Thread::OffsetFromThread(object))); | 2841 movq(TMP, Address(THR, Thread::OffsetFromThread(object))); |
| 2837 movq(dst, TMP); | 2842 movq(dst, TMP); |
| 2838 } else if (CanLoadFromObjectPool(object)) { | 2843 } else if (CanLoadFromObjectPool(object)) { |
| 2839 LoadObject(TMP, object, pp); | 2844 LoadObject(TMP, object); |
| 2840 movq(dst, TMP); | 2845 movq(dst, TMP); |
| 2841 } else { | 2846 } else { |
| 2842 MoveImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); | 2847 MoveImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
| 2843 } | 2848 } |
| 2844 } | 2849 } |
| 2845 | 2850 |
| 2846 | 2851 |
| 2847 void Assembler::PushObject(const Object& object, Register pp) { | 2852 void Assembler::PushObject(const Object& object) { |
| 2848 if (Thread::CanLoadFromThread(object)) { | 2853 if (Thread::CanLoadFromThread(object)) { |
| 2849 pushq(Address(THR, Thread::OffsetFromThread(object))); | 2854 pushq(Address(THR, Thread::OffsetFromThread(object))); |
| 2850 } else if (CanLoadFromObjectPool(object)) { | 2855 } else if (CanLoadFromObjectPool(object)) { |
| 2851 LoadObject(TMP, object, pp); | 2856 LoadObject(TMP, object); |
| 2852 pushq(TMP); | 2857 pushq(TMP); |
| 2853 } else { | 2858 } else { |
| 2854 PushImmediate(Immediate(reinterpret_cast<int64_t>(object.raw())), pp); | 2859 PushImmediate(Immediate(reinterpret_cast<int64_t>(object.raw()))); |
| 2855 } | 2860 } |
| 2856 } | 2861 } |
| 2857 | 2862 |
| 2858 | 2863 |
| 2859 void Assembler::CompareObject(Register reg, const Object& object, Register pp) { | 2864 void Assembler::CompareObject(Register reg, const Object& object) { |
| 2860 if (Thread::CanLoadFromThread(object)) { | 2865 if (Thread::CanLoadFromThread(object)) { |
| 2861 cmpq(reg, Address(THR, Thread::OffsetFromThread(object))); | 2866 cmpq(reg, Address(THR, Thread::OffsetFromThread(object))); |
| 2862 } else if (CanLoadFromObjectPool(object)) { | 2867 } else if (CanLoadFromObjectPool(object)) { |
| 2863 const int32_t offset = | 2868 const int32_t offset = |
| 2864 ObjectPool::element_offset(object_pool_wrapper_.FindObject(object)); | 2869 ObjectPool::element_offset(object_pool_wrapper_.FindObject(object)); |
| 2865 cmpq(reg, Address(pp, offset-kHeapObjectTag)); | 2870 cmpq(reg, Address(PP, offset-kHeapObjectTag)); |
| 2866 } else { | 2871 } else { |
| 2867 CompareImmediate( | 2872 CompareImmediate( |
| 2868 reg, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); | 2873 reg, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
| 2869 } | 2874 } |
| 2870 } | 2875 } |
| 2871 | 2876 |
| 2872 | 2877 |
| 2873 intptr_t Assembler::FindImmediate(int64_t imm) { | 2878 intptr_t Assembler::FindImmediate(int64_t imm) { |
| 2874 return object_pool_wrapper_.FindImmediate(imm); | 2879 return object_pool_wrapper_.FindImmediate(imm); |
| 2875 } | 2880 } |
| 2876 | 2881 |
| 2877 | 2882 |
| 2878 bool Assembler::CanLoadImmediateFromPool(const Immediate& imm, Register pp) { | 2883 void Assembler::LoadImmediate(Register reg, const Immediate& imm) { |
| 2879 if (!constant_pool_allowed()) { | 2884 if (imm.is_int32() || !constant_pool_allowed()) { |
| 2880 return false; | 2885 movq(reg, imm); |
| 2881 } | 2886 } else { |
| 2882 return !imm.is_int32() && (pp != kNoRegister); | |
| 2883 } | |
| 2884 | |
| 2885 | |
| 2886 void Assembler::LoadImmediate(Register reg, const Immediate& imm, Register pp) { | |
| 2887 if (CanLoadImmediateFromPool(imm, pp)) { | |
| 2888 int32_t offset = ObjectPool::element_offset(FindImmediate(imm.value())); | 2887 int32_t offset = ObjectPool::element_offset(FindImmediate(imm.value())); |
| 2889 LoadWordFromPoolOffset(reg, pp, offset - kHeapObjectTag); | 2888 LoadWordFromPoolOffset(reg, offset - kHeapObjectTag); |
| 2890 } else { | |
| 2891 movq(reg, imm); | |
| 2892 } | 2889 } |
| 2893 } | 2890 } |
| 2894 | 2891 |
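Because this hunk folds the old CanLoadImmediateFromPool helper into LoadImmediate itself, here is the new side reassembled in one piece (a sketch that mirrors the diff, not the landed file):

```cpp
// Sketch only: small immediates, and any code emitted while the constant
// pool is disallowed, use a plain movq; everything else is loaded from the
// object pool through PP.
void Assembler::LoadImmediate(Register reg, const Immediate& imm) {
  if (imm.is_int32() || !constant_pool_allowed()) {
    movq(reg, imm);  // Encoded directly in the instruction stream.
  } else {
    int32_t offset = ObjectPool::element_offset(FindImmediate(imm.value()));
    LoadWordFromPoolOffset(reg, offset - kHeapObjectTag);
  }
}
```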
| 2895 | 2892 |
| 2896 void Assembler::MoveImmediate(const Address& dst, const Immediate& imm, | 2893 void Assembler::MoveImmediate(const Address& dst, const Immediate& imm) { |
| 2897 Register pp) { | 2894 if (imm.is_int32()) { |
| 2898 if (CanLoadImmediateFromPool(imm, pp)) { | 2895 movq(dst, imm); |
| 2899 LoadImmediate(TMP, imm, pp); | 2896 } else { |
| 2897 LoadImmediate(TMP, imm); | |
| 2900 movq(dst, TMP); | 2898 movq(dst, TMP); |
| 2901 } else { | |
| 2902 movq(dst, imm); | |
| 2903 } | 2899 } |
| 2904 } | 2900 } |
| 2905 | 2901 |
| 2906 | 2902 |
| 2907 // Destroys the value register. | 2903 // Destroys the value register. |
| 2908 void Assembler::StoreIntoObjectFilterNoSmi(Register object, | 2904 void Assembler::StoreIntoObjectFilterNoSmi(Register object, |
| 2909 Register value, | 2905 Register value, |
| 2910 Label* no_update) { | 2906 Label* no_update) { |
| 2911 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && | 2907 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && |
| 2912 (kOldObjectAlignmentOffset == 0)); | 2908 (kOldObjectAlignmentOffset == 0)); |
| (...skipping 165 matching lines...) | |
| 3078 Bind(&done); | 3074 Bind(&done); |
| 3079 popq(value); | 3075 popq(value); |
| 3080 #endif // defined(DEBUG) | 3076 #endif // defined(DEBUG) |
| 3081 // No store buffer update. | 3077 // No store buffer update. |
| 3082 } | 3078 } |
| 3083 | 3079 |
| 3084 | 3080 |
| 3085 void Assembler::StoreIntoObjectNoBarrier(Register object, | 3081 void Assembler::StoreIntoObjectNoBarrier(Register object, |
| 3086 const Address& dest, | 3082 const Address& dest, |
| 3087 const Object& value, | 3083 const Object& value, |
| 3088 Register pp, | |
| 3089 FieldContent old_content) { | 3084 FieldContent old_content) { |
| 3090 VerifyHeapWord(dest, old_content); | 3085 VerifyHeapWord(dest, old_content); |
| 3091 if (VerifiedMemory::enabled()) { | 3086 if (VerifiedMemory::enabled()) { |
| 3092 Register temp = (pp == RCX) ? RDX : RCX; | 3087 const Register temp = RCX; |
| 3093 pushq(temp); | 3088 pushq(temp); |
| 3094 leaq(temp, dest); | 3089 leaq(temp, dest); |
| 3095 StoreObject(Address(temp, 0), value, pp); | 3090 StoreObject(Address(temp, 0), value); |
| 3096 StoreObject(Address(temp, VerifiedMemory::offset()), value, pp); | 3091 StoreObject(Address(temp, VerifiedMemory::offset()), value); |
| 3097 popq(temp); | 3092 popq(temp); |
| 3098 } else { | 3093 } else { |
| 3099 StoreObject(dest, value, pp); | 3094 StoreObject(dest, value); |
| 3100 } | 3095 } |
| 3101 // TODO(koda): Use 'object', verify that generational barrier's not needed. | 3096 // TODO(koda): Use 'object', verify that generational barrier's not needed. |
| 3102 } | 3097 } |
| 3103 | 3098 |
| 3104 | 3099 |
| 3105 void Assembler::StoreIntoSmiField(const Address& dest, Register value) { | 3100 void Assembler::StoreIntoSmiField(const Address& dest, Register value) { |
| 3106 #if defined(DEBUG) | 3101 #if defined(DEBUG) |
| 3107 Label done; | 3102 Label done; |
| 3108 testq(value, Immediate(kHeapObjectTag)); | 3103 testq(value, Immediate(kHeapObjectTag)); |
| 3109 j(ZERO, &done); | 3104 j(ZERO, &done); |
| (...skipping 36 matching lines...) | |
| 3146 } | 3141 } |
| 3147 | 3142 |
| 3148 | 3143 |
| 3149 void Assembler::DoubleNegate(XmmRegister d) { | 3144 void Assembler::DoubleNegate(XmmRegister d) { |
| 3150 static const struct ALIGN16 { | 3145 static const struct ALIGN16 { |
| 3151 uint64_t a; | 3146 uint64_t a; |
| 3152 uint64_t b; | 3147 uint64_t b; |
| 3153 } double_negate_constant = | 3148 } double_negate_constant = |
| 3154 {0x8000000000000000LL, 0x8000000000000000LL}; | 3149 {0x8000000000000000LL, 0x8000000000000000LL}; |
| 3155 LoadImmediate( | 3150 LoadImmediate( |
| 3156 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)), PP); | 3151 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant))); |
| 3157 xorpd(d, Address(TMP, 0)); | 3152 xorpd(d, Address(TMP, 0)); |
| 3158 } | 3153 } |
| 3159 | 3154 |
| 3160 | 3155 |
| 3161 void Assembler::DoubleAbs(XmmRegister reg) { | 3156 void Assembler::DoubleAbs(XmmRegister reg) { |
| 3162 static const struct ALIGN16 { | 3157 static const struct ALIGN16 { |
| 3163 uint64_t a; | 3158 uint64_t a; |
| 3164 uint64_t b; | 3159 uint64_t b; |
| 3165 } double_abs_constant = | 3160 } double_abs_constant = |
| 3166 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; | 3161 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; |
| 3167 LoadImmediate(TMP, | 3162 LoadImmediate(TMP, |
| 3168 Immediate(reinterpret_cast<intptr_t>(&double_abs_constant)), PP); | 3163 Immediate(reinterpret_cast<intptr_t>(&double_abs_constant))); |
| 3169 andpd(reg, Address(TMP, 0)); | 3164 andpd(reg, Address(TMP, 0)); |
| 3170 } | 3165 } |
| 3171 | 3166 |
| 3172 | 3167 |
| 3173 void Assembler::Stop(const char* message, bool fixed_length_encoding) { | 3168 void Assembler::Stop(const char* message, bool fixed_length_encoding) { |
| 3174 int64_t message_address = reinterpret_cast<int64_t>(message); | 3169 int64_t message_address = reinterpret_cast<int64_t>(message); |
| 3175 if (FLAG_print_stop_message) { | 3170 if (FLAG_print_stop_message) { |
| 3176 pushq(TMP); // Preserve TMP register. | 3171 pushq(TMP); // Preserve TMP register. |
| 3177 pushq(RDI); // Preserve RDI register. | 3172 pushq(RDI); // Preserve RDI register. |
| 3178 if (fixed_length_encoding) { | 3173 if (fixed_length_encoding) { |
| 3179 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 3174 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 3180 EmitRegisterREX(RDI, REX_W); | 3175 EmitRegisterREX(RDI, REX_W); |
| 3181 EmitUint8(0xB8 | (RDI & 7)); | 3176 EmitUint8(0xB8 | (RDI & 7)); |
| 3182 EmitInt64(message_address); | 3177 EmitInt64(message_address); |
| 3183 } else { | 3178 } else { |
| 3184 LoadImmediate(RDI, Immediate(message_address), PP); | 3179 LoadImmediate(RDI, Immediate(message_address)); |
| 3185 } | 3180 } |
| 3186 call(&StubCode::PrintStopMessageLabel()); | 3181 call(&StubCode::PrintStopMessageLabel()); |
| 3187 popq(RDI); // Restore RDI register. | 3182 popq(RDI); // Restore RDI register. |
| 3188 popq(TMP); // Restore TMP register. | 3183 popq(TMP); // Restore TMP register. |
| 3189 } else { | 3184 } else { |
| 3190 // Emit the lower half and the higher half of the message address as | 3185 // Emit the lower half and the higher half of the message address as |
| 3191 // immediate operands in the test rax instructions. | 3186 // immediate operands in the test rax instructions. |
| 3192 testl(RAX, Immediate(Utils::Low32Bits(message_address))); | 3187 testl(RAX, Immediate(Utils::Low32Bits(message_address))); |
| 3193 testl(RAX, Immediate(Utils::High32Bits(message_address))); | 3188 testl(RAX, Immediate(Utils::High32Bits(message_address))); |
| 3194 } | 3189 } |
| (...skipping 57 matching lines...) | |
| 3252 if (OS::ActivationFrameAlignment() > 1) { | 3247 if (OS::ActivationFrameAlignment() > 1) { |
| 3253 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 3248 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
| 3254 } | 3249 } |
| 3255 } | 3250 } |
| 3256 | 3251 |
| 3257 | 3252 |
| 3258 void Assembler::PushRegisters(intptr_t cpu_register_set, | 3253 void Assembler::PushRegisters(intptr_t cpu_register_set, |
| 3259 intptr_t xmm_register_set) { | 3254 intptr_t xmm_register_set) { |
| 3260 const intptr_t xmm_regs_count = RegisterSet::RegisterCount(xmm_register_set); | 3255 const intptr_t xmm_regs_count = RegisterSet::RegisterCount(xmm_register_set); |
| 3261 if (xmm_regs_count > 0) { | 3256 if (xmm_regs_count > 0) { |
| 3262 AddImmediate(RSP, Immediate(-xmm_regs_count * kFpuRegisterSize), PP); | 3257 AddImmediate(RSP, Immediate(-xmm_regs_count * kFpuRegisterSize)); |
| 3263 // Store XMM registers with the lowest register number at the lowest | 3258 // Store XMM registers with the lowest register number at the lowest |
| 3264 // address. | 3259 // address. |
| 3265 intptr_t offset = 0; | 3260 intptr_t offset = 0; |
| 3266 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 3261 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
| 3267 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 3262 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
| 3268 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { | 3263 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { |
| 3269 movups(Address(RSP, offset), xmm_reg); | 3264 movups(Address(RSP, offset), xmm_reg); |
| 3270 offset += kFpuRegisterSize; | 3265 offset += kFpuRegisterSize; |
| 3271 } | 3266 } |
| 3272 } | 3267 } |
| (...skipping 28 matching lines...) | |
| 3301 // XMM registers have the lowest register number at the lowest address. | 3296 // XMM registers have the lowest register number at the lowest address. |
| 3302 intptr_t offset = 0; | 3297 intptr_t offset = 0; |
| 3303 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 3298 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
| 3304 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 3299 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
| 3305 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { | 3300 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { |
| 3306 movups(xmm_reg, Address(RSP, offset)); | 3301 movups(xmm_reg, Address(RSP, offset)); |
| 3307 offset += kFpuRegisterSize; | 3302 offset += kFpuRegisterSize; |
| 3308 } | 3303 } |
| 3309 } | 3304 } |
| 3310 ASSERT(offset == (xmm_regs_count * kFpuRegisterSize)); | 3305 ASSERT(offset == (xmm_regs_count * kFpuRegisterSize)); |
| 3311 AddImmediate(RSP, Immediate(offset), PP); | 3306 AddImmediate(RSP, Immediate(offset)); |
| 3312 } | 3307 } |
| 3313 } | 3308 } |
| 3314 | 3309 |
| 3315 | 3310 |
| 3316 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { | 3311 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { |
| 3317 EnterFrame(0); | 3312 EnterFrame(0); |
| 3318 | 3313 |
| 3319 // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch. | 3314 // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch. |
| 3320 PushRegisters(CallingConventions::kVolatileCpuRegisters, | 3315 PushRegisters(CallingConventions::kVolatileCpuRegisters, |
| 3321 CallingConventions::kVolatileXmmRegisters); | 3316 CallingConventions::kVolatileXmmRegisters); |
| (...skipping 49 matching lines...) | |
| 3371 | 3366 |
| 3372 void Assembler::LoadPoolPointer(Register pp) { | 3367 void Assembler::LoadPoolPointer(Register pp) { |
| 3373 // Load new pool pointer. | 3368 // Load new pool pointer. |
| 3374 const intptr_t kRIPRelativeMovqSize = 7; | 3369 const intptr_t kRIPRelativeMovqSize = 7; |
| 3375 const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize; | 3370 const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize; |
| 3376 const intptr_t object_pool_pc_dist = | 3371 const intptr_t object_pool_pc_dist = |
| 3377 Instructions::HeaderSize() - Instructions::object_pool_offset(); | 3372 Instructions::HeaderSize() - Instructions::object_pool_offset(); |
| 3378 movq(pp, Address::AddressRIPRelative( | 3373 movq(pp, Address::AddressRIPRelative( |
| 3379 -entry_to_rip_offset - object_pool_pc_dist)); | 3374 -entry_to_rip_offset - object_pool_pc_dist)); |
| 3380 ASSERT(CodeSize() == entry_to_rip_offset); | 3375 ASSERT(CodeSize() == entry_to_rip_offset); |
| 3376 set_constant_pool_allowed(pp == PP); | |
| 3381 } | 3377 } |
| 3382 | 3378 |
| 3383 | 3379 |
| 3384 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size, | 3380 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size, |
| 3385 Register new_pp, | 3381 Register new_pp, |
| 3386 Register pc_marker_override) { | 3382 Register pc_marker_override) { |
| 3383 ASSERT(!constant_pool_allowed()); | |
| 3387 EnterFrame(0); | 3384 EnterFrame(0); |
| 3388 pushq(pc_marker_override); | 3385 pushq(pc_marker_override); |
| 3389 pushq(PP); | 3386 pushq(PP); |
| 3390 movq(PP, new_pp); | 3387 movq(PP, new_pp); |
| 3388 set_constant_pool_allowed(true); | |
| 3391 if (frame_size != 0) { | 3389 if (frame_size != 0) { |
| 3392 subq(RSP, Immediate(frame_size)); | 3390 subq(RSP, Immediate(frame_size)); |
| 3393 } | 3391 } |
| 3394 } | 3392 } |
| 3395 | 3393 |
| 3396 | 3394 |
| 3397 void Assembler::LeaveDartFrame() { | 3395 void Assembler::LeaveDartFrame() { |
| 3396 // LeaveDartFrame is called from stubs (pp disallowed) and from Dart code (pp | |
| 3397 // allowed), so there is no point in checking the current value of | |
| 3398 // constant_pool_allowed(). | |
| 3399 set_constant_pool_allowed(false); | |
| 3398 // Restore caller's PP register that was pushed in EnterDartFrame. | 3400 // Restore caller's PP register that was pushed in EnterDartFrame. |
| 3399 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); | 3401 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); |
| 3400 LeaveFrame(); | 3402 LeaveFrame(); |
| 3401 } | 3403 } |
| 3402 | 3404 |
| 3403 | 3405 |
| 3404 // On entry to a function compiled for OSR, the caller's frame pointer, the | 3406 // On entry to a function compiled for OSR, the caller's frame pointer, the |
| 3405 // stack locals, and any copied parameters are already in place. The frame | 3407 // stack locals, and any copied parameters are already in place. The frame |
| 3406 // pointer is already set up. The PC marker is not correct for the | 3408 // pointer is already set up. The PC marker is not correct for the |
| 3407 // optimized function and there may be extra space for spill slots to | 3409 // optimized function and there may be extra space for spill slots to |
| 3408 // allocate. | 3410 // allocate. |
| 3409 void Assembler::EnterOsrFrame(intptr_t extra_size, | 3411 void Assembler::EnterOsrFrame(intptr_t extra_size, |
| 3410 Register new_pp, | 3412 Register new_pp, |
| 3411 Register pc_marker_override) { | 3413 Register pc_marker_override) { |
| 3414 ASSERT(!constant_pool_allowed()); | |
| 3412 if (prologue_offset_ == -1) { | 3415 if (prologue_offset_ == -1) { |
| 3413 Comment("PrologueOffset = %" Pd "", CodeSize()); | 3416 Comment("PrologueOffset = %" Pd "", CodeSize()); |
| 3414 prologue_offset_ = CodeSize(); | 3417 prologue_offset_ = CodeSize(); |
| 3415 } | 3418 } |
| 3416 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override); | 3419 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override); |
| 3417 movq(PP, new_pp); | 3420 movq(PP, new_pp); |
| 3421 set_constant_pool_allowed(true); | |
| 3418 if (extra_size != 0) { | 3422 if (extra_size != 0) { |
| 3419 subq(RSP, Immediate(extra_size)); | 3423 subq(RSP, Immediate(extra_size)); |
| 3420 } | 3424 } |
| 3421 } | 3425 } |
| 3422 | 3426 |
| 3423 | 3427 |
| 3424 void Assembler::EnterStubFrame() { | 3428 void Assembler::EnterStubFrame() { |
| 3429 set_constant_pool_allowed(false); | |
| 3425 EnterFrame(0); | 3430 EnterFrame(0); |
| 3426 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. | 3431 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. |
| 3427 pushq(PP); // Save caller's pool pointer | 3432 pushq(PP); // Save caller's pool pointer |
| 3428 LoadPoolPointer(PP); | 3433 LoadPoolPointer(PP); |
| 3429 } | 3434 } |
| 3430 | 3435 |
| 3431 | 3436 |
| 3432 void Assembler::LeaveStubFrame() { | 3437 void Assembler::LeaveStubFrame() { |
| | 3438 set_constant_pool_allowed(false); |
| 3433 // Restore caller's PP register that was pushed in EnterStubFrame. | 3439 // Restore caller's PP register that was pushed in EnterStubFrame. |
| 3434 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); | 3440 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); |
| 3435 LeaveFrame(); | 3441 LeaveFrame(); |
| 3436 } | 3442 } |
| 3437 | 3443 |
| 3438 | 3444 |
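A hedged sketch of how these two helpers are typically paired in a stub generator. The function name, kExampleRuntimeEntry, and the `__ assembler->` shorthand are placeholders for illustration and are not part of this CL; only EnterStubFrame/LeaveStubFrame and the individual instructions come from this file.

#define __ assembler->

// Illustrative only: bracket a runtime transition with the PP-aware helpers.
static void GenerateExampleStub(Assembler* assembler) {
  __ EnterStubFrame();                      // Saves caller's PP, reloads PP.
  __ pushq(RAX);                            // One stack argument for the call.
  __ CallRuntime(kExampleRuntimeEntry, 1);  // Hypothetical runtime entry.
  __ popq(RAX);                             // Pop the argument slot.
  __ LeaveStubFrame();                      // Restores caller's PP.
  __ ret();
}

#undef __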
| 3439 void Assembler::MaybeTraceAllocation(intptr_t cid, | 3445 void Assembler::MaybeTraceAllocation(intptr_t cid, |
| 3440 Label* trace, | 3446 Label* trace, |
| 3441 bool near_jump, | 3447 bool near_jump, |
| 3442 bool inline_isolate) { | 3448 bool inline_isolate) { |
| (...skipping 71 matching lines...) | |
| 3514 UpdateAllocationStats(cid, space, inline_isolate); | 3520 UpdateAllocationStats(cid, space, inline_isolate); |
| 3515 Register temp_reg = TMP; | 3521 Register temp_reg = TMP; |
| 3516 intptr_t size_offset = ClassTable::SizeOffsetFor(cid, space == Heap::kNew); | 3522 intptr_t size_offset = ClassTable::SizeOffsetFor(cid, space == Heap::kNew); |
| 3517 addq(Address(temp_reg, size_offset), Immediate(size_in_bytes)); | 3523 addq(Address(temp_reg, size_offset), Immediate(size_in_bytes)); |
| 3518 } | 3524 } |
| 3519 | 3525 |
| 3520 | 3526 |
| 3521 void Assembler::TryAllocate(const Class& cls, | 3527 void Assembler::TryAllocate(const Class& cls, |
| 3522 Label* failure, | 3528 Label* failure, |
| 3523 bool near_jump, | 3529 bool near_jump, |
| 3524 Register instance_reg, | 3530 Register instance_reg) { |
| 3525 Register pp) { | |
| 3526 ASSERT(failure != NULL); | 3531 ASSERT(failure != NULL); |
| 3527 if (FLAG_inline_alloc) { | 3532 if (FLAG_inline_alloc) { |
| 3528 // If this allocation is traced, the program will jump to the failure path | 3533 // If this allocation is traced, the program will jump to the failure path |
| 3529 // (i.e. the allocation stub), which will allocate the object and trace the | 3534 // (i.e. the allocation stub), which will allocate the object and trace the |
| 3530 // allocation call site. | 3535 // allocation call site. |
| 3531 MaybeTraceAllocation(cls.id(), failure, near_jump); | 3536 MaybeTraceAllocation(cls.id(), failure, near_jump); |
| 3532 Heap* heap = Isolate::Current()->heap(); | 3537 Heap* heap = Isolate::Current()->heap(); |
| 3533 const intptr_t instance_size = cls.instance_size(); | 3538 const intptr_t instance_size = cls.instance_size(); |
| 3534 Heap::Space space = heap->SpaceForAllocation(cls.id()); | 3539 Heap::Space space = heap->SpaceForAllocation(cls.id()); |
| 3535 LoadImmediate(TMP, Immediate(heap->TopAddress(space)), pp); | 3540 LoadImmediate(TMP, Immediate(heap->TopAddress(space))); |
| 3536 movq(instance_reg, Address(TMP, 0)); | 3541 movq(instance_reg, Address(TMP, 0)); |
| 3537 AddImmediate(instance_reg, Immediate(instance_size), pp); | 3542 AddImmediate(instance_reg, Immediate(instance_size)); |
| 3538 // instance_reg: potential next object start. | 3543 // instance_reg: potential next object start. |
| 3539 LoadImmediate(TMP, Immediate(heap->EndAddress(space)), pp); | 3544 LoadImmediate(TMP, Immediate(heap->EndAddress(space))); |
| 3540 cmpq(instance_reg, Address(TMP, 0)); | 3545 cmpq(instance_reg, Address(TMP, 0)); |
| 3541 j(ABOVE_EQUAL, failure, near_jump); | 3546 j(ABOVE_EQUAL, failure, near_jump); |
| 3542 // Successfully allocated the object; now update top to point to the | 3547 // Successfully allocated the object; now update top to point to the |
| 3543 // next object start and store the tags (size and class id) in the header. | 3548 // next object start and store the tags (size and class id) in the header. |
| 3544 LoadImmediate(TMP, Immediate(heap->TopAddress(space)), pp); | 3549 LoadImmediate(TMP, Immediate(heap->TopAddress(space))); |
| 3545 movq(Address(TMP, 0), instance_reg); | 3550 movq(Address(TMP, 0), instance_reg); |
| 3546 UpdateAllocationStats(cls.id(), space); | 3551 UpdateAllocationStats(cls.id(), space); |
| 3547 ASSERT(instance_size >= kHeapObjectTag); | 3552 ASSERT(instance_size >= kHeapObjectTag); |
| 3548 AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size), pp); | 3553 AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size)); |
| 3549 uword tags = 0; | 3554 uword tags = 0; |
| 3550 tags = RawObject::SizeTag::update(instance_size, tags); | 3555 tags = RawObject::SizeTag::update(instance_size, tags); |
| 3551 ASSERT(cls.id() != kIllegalCid); | 3556 ASSERT(cls.id() != kIllegalCid); |
| 3552 tags = RawObject::ClassIdTag::update(cls.id(), tags); | 3557 tags = RawObject::ClassIdTag::update(cls.id(), tags); |
| 3553 MoveImmediate(FieldAddress(instance_reg, Object::tags_offset()), | 3558 MoveImmediate(FieldAddress(instance_reg, Object::tags_offset()), |
| 3554 Immediate(tags), pp); | 3559 Immediate(tags)); |
| 3555 } else { | 3560 } else { |
| 3556 jmp(failure); | 3561 jmp(failure); |
| 3557 } | 3562 } |
| 3558 } | 3563 } |
| 3559 | 3564 |
| 3560 | 3565 |
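As a usage sketch only: callers such as intrinsics pair TryAllocate with a slow-path label that falls back to the allocation stub. The function name, the cls handle, and the int3() placeholder below are assumptions for illustration, not code from this CL.

#define __ assembler->

// Illustrative only: fast-path allocation with a bail-out label.
static void AllocateOrBail(Assembler* assembler, const Class& cls) {
  Label slow_case;
  // Jumps to slow_case if inline allocation is disabled, the allocation is
  // traced, or new space is exhausted.
  __ TryAllocate(cls, &slow_case, Assembler::kFarJump, RAX);
  // RAX now holds the tagged instance with its header tags initialized; the
  // caller is still responsible for initializing the instance's fields.
  __ ret();

  __ Bind(&slow_case);
  __ int3();  // Placeholder: real code would invoke the allocation stub here.
}

#undef __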
| 3561 void Assembler::TryAllocateArray(intptr_t cid, | 3566 void Assembler::TryAllocateArray(intptr_t cid, |
| 3562 intptr_t instance_size, | 3567 intptr_t instance_size, |
| 3563 Label* failure, | 3568 Label* failure, |
| 3564 bool near_jump, | 3569 bool near_jump, |
| (...skipping 179 matching lines...) | |
| 3744 void Assembler::LoadClassId(Register result, Register object) { | 3749 void Assembler::LoadClassId(Register result, Register object) { |
| 3745 ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32); | 3750 ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32); |
| 3746 ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32); | 3751 ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32); |
| 3747 ASSERT(sizeof(classid_t) == sizeof(uint32_t)); | 3752 ASSERT(sizeof(classid_t) == sizeof(uint32_t)); |
| 3748 const intptr_t class_id_offset = Object::tags_offset() + | 3753 const intptr_t class_id_offset = Object::tags_offset() + |
| 3749 RawObject::kClassIdTagPos / kBitsPerByte; | 3754 RawObject::kClassIdTagPos / kBitsPerByte; |
| 3750 movl(result, FieldAddress(object, class_id_offset)); | 3755 movl(result, FieldAddress(object, class_id_offset)); |
| 3751 } | 3756 } |
| 3752 | 3757 |
| 3753 | 3758 |
| 3754 void Assembler::LoadClassById(Register result, Register class_id, Register pp) { | 3759 void Assembler::LoadClassById(Register result, Register class_id) { |
| 3755 ASSERT(result != class_id); | 3760 ASSERT(result != class_id); |
| 3756 LoadIsolate(result); | 3761 LoadIsolate(result); |
| 3757 const intptr_t offset = | 3762 const intptr_t offset = |
| 3758 Isolate::class_table_offset() + ClassTable::table_offset(); | 3763 Isolate::class_table_offset() + ClassTable::table_offset(); |
| 3759 movq(result, Address(result, offset)); | 3764 movq(result, Address(result, offset)); |
| 3760 movq(result, Address(result, class_id, TIMES_8, 0)); | 3765 movq(result, Address(result, class_id, TIMES_8, 0)); |
| 3761 } | 3766 } |
| 3762 | 3767 |
| 3763 | 3768 |
| 3764 void Assembler::LoadClass(Register result, Register object, Register pp) { | 3769 void Assembler::LoadClass(Register result, Register object) { |
| 3765 LoadClassId(TMP, object); | 3770 LoadClassId(TMP, object); |
| 3766 LoadClassById(result, TMP, pp); | 3771 LoadClassById(result, TMP); |
| 3767 } | 3772 } |
| 3768 | 3773 |
| 3769 | 3774 |
| 3770 void Assembler::CompareClassId(Register object, intptr_t class_id) { | 3775 void Assembler::CompareClassId(Register object, intptr_t class_id) { |
| 3771 LoadClassId(TMP, object); | 3776 LoadClassId(TMP, object); |
| 3772 cmpl(TMP, Immediate(class_id)); | 3777 cmpl(TMP, Immediate(class_id)); |
| 3773 } | 3778 } |
| 3774 | 3779 |
| 3775 | 3780 |
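A minimal usage sketch; the register, cid, and label are placeholders, and note that CompareClassId clobbers TMP via LoadClassId. The `__ assembler->` shorthand is assumed as in the sketches above.

#define __ assembler->

// Illustrative only: branch away unless the object in RAX is a Double.
static void BranchIfNotDouble(Assembler* assembler, Label* not_double) {
  __ CompareClassId(RAX, kDoubleCid);
  __ j(NOT_EQUAL, not_double, Assembler::kFarJump);
}

#undef __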
| 3776 void Assembler::SmiUntagOrCheckClass(Register object, | 3781 void Assembler::SmiUntagOrCheckClass(Register object, |
| (...skipping 14 matching lines...) | |
| 3791 movl(TMP, Address(object, TIMES_2, class_id_offset)); | 3796 movl(TMP, Address(object, TIMES_2, class_id_offset)); |
| 3792 cmpl(TMP, Immediate(class_id)); | 3797 cmpl(TMP, Immediate(class_id)); |
| 3793 } | 3798 } |
| 3794 | 3799 |
| 3795 | 3800 |
| 3796 void Assembler::LoadClassIdMayBeSmi(Register result, Register object) { | 3801 void Assembler::LoadClassIdMayBeSmi(Register result, Register object) { |
| 3797 ASSERT(result != object); | 3802 ASSERT(result != object); |
| 3798 | 3803 |
| 3799 // Load up a null object. We only need it so we can use LoadClassId on it in | 3804 // Load up a null object. We only need it so we can use LoadClassId on it in |
| 3800 // the case that the object is a Smi. | 3805 // the case that the object is a Smi. |
| 3801 LoadObject(result, Object::null_object(), PP); | 3806 LoadObject(result, Object::null_object()); |
| 3802 // Check if the object is a Smi. | 3807 // Check if the object is a Smi. |
| 3803 testq(object, Immediate(kSmiTagMask)); | 3808 testq(object, Immediate(kSmiTagMask)); |
| 3804 // If the object *is* a Smi, use the null object instead. | 3809 // If the object *is* a Smi, use the null object instead. |
| 3805 cmoveq(object, result); | 3810 cmoveq(object, result); |
| 3806 // Loads either the cid of the object if it isn't a Smi, or the cid of null | 3811 // Loads either the cid of the object if it isn't a Smi, or the cid of null |
| 3807 // if it is a Smi, which will be ignored. | 3812 // if it is a Smi, which will be ignored. |
| 3808 LoadClassId(result, object); | 3813 LoadClassId(result, object); |
| 3809 | 3814 |
| 3810 movq(object, Immediate(kSmiCid)); | 3815 movq(object, Immediate(kSmiCid)); |
| 3811 // If the object is a Smi, move the Smi cid into result; else leave it. | 3816 // If the object is a Smi, move the Smi cid into result; else leave it. |
| (...skipping 119 matching lines...) | |
| 3931 | 3936 |
| 3932 | 3937 |
| 3933 const char* Assembler::FpuRegisterName(FpuRegister reg) { | 3938 const char* Assembler::FpuRegisterName(FpuRegister reg) { |
| 3934 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); | 3939 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); |
| 3935 return xmm_reg_names[reg]; | 3940 return xmm_reg_names[reg]; |
| 3936 } | 3941 } |
| 3937 | 3942 |
| 3938 } // namespace dart | 3943 } // namespace dart |
| 3939 | 3944 |
| 3940 #endif // defined TARGET_ARCH_X64 | 3945 #endif // defined TARGET_ARCH_X64 |