Chromium Code Reviews

Side by Side Diff: runtime/vm/assembler_x64.cc

Issue 1268783003: Simplify constant pool usage in x64 code generator (by removing extra argument) (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: address comments (Created 5 years, 4 months ago)
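The pattern applied throughout this file: macro-assembler helpers such as LoadImmediate, CompareImmediate, AddImmediate, LoadObject, and PushImmediate lose their explicit pool-pointer (pp) argument and instead always go through the fixed PP register, guarded by the assembler's constant_pool_allowed() state (toggled in LoadPoolPointer, EnterDartFrameWithInfo, EnterOsrFrame, EnterStubFrame, LeaveDartFrame, and LeaveStubFrame). The standalone sketch below only models that before/after shape; ToyAssembler, pool_offset, and the register strings are illustrative inventions, not the VM's actual API.

    #include <cstdint>
    #include <iostream>

    // Stand-in for the assembler's Immediate operand (illustrative only).
    struct Immediate {
      int64_t value;
      bool is_int32() const { return value >= INT32_MIN && value <= INT32_MAX; }
    };

    // ToyAssembler is a hypothetical model of the CL's shape: helpers no longer
    // take a `Register pp` parameter; they consult constant_pool_allowed() and
    // implicitly use the fixed PP register for object-pool loads.
    class ToyAssembler {
     public:
      void set_constant_pool_allowed(bool allowed) { constant_pool_allowed_ = allowed; }
      bool constant_pool_allowed() const { return constant_pool_allowed_; }

      // Mirrors the new LoadImmediate logic in this CL: small immediates stay
      // inline, large ones come from the object pool only when the pool is allowed.
      void LoadImmediate(const char* reg, const Immediate& imm) {
        if (imm.is_int32() || !constant_pool_allowed()) {
          std::cout << "movq " << reg << ", $" << imm.value << "\n";
        } else {
          // pool_offset() is a made-up placeholder for FindImmediate() + element_offset().
          std::cout << "movq " << reg << ", [PP + pool_offset(" << imm.value << ")]\n";
        }
      }

     private:
      bool constant_pool_allowed_ = false;  // matches the new constructor default
    };

    int main() {
      ToyAssembler assembler;
      assembler.set_constant_pool_allowed(true);  // e.g. after EnterDartFrameWithInfo
      assembler.LoadImmediate("RAX", Immediate{42});                // fits in imm32
      assembler.LoadImmediate("RAX", Immediate{0x123456789ABCLL});  // via object pool
      return 0;
    }

With this shape, call sites no longer thread a pp register through every helper; whether the object pool may be used becomes a property of the assembler's current frame state rather than of each individual call.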
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // NOLINT 5 #include "vm/globals.h" // NOLINT
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/cpu.h" 9 #include "vm/cpu.h"
10 #include "vm/heap.h" 10 #include "vm/heap.h"
11 #include "vm/instructions.h" 11 #include "vm/instructions.h"
12 #include "vm/locations.h" 12 #include "vm/locations.h"
13 #include "vm/memory_region.h" 13 #include "vm/memory_region.h"
14 #include "vm/runtime_entry.h" 14 #include "vm/runtime_entry.h"
15 #include "vm/stack_frame.h" 15 #include "vm/stack_frame.h"
16 #include "vm/stub_code.h" 16 #include "vm/stub_code.h"
17 17
18 namespace dart { 18 namespace dart {
19 19
20 DEFINE_FLAG(bool, print_stop_message, true, "Print stop message."); 20 DEFINE_FLAG(bool, print_stop_message, true, "Print stop message.");
21 DECLARE_FLAG(bool, inline_alloc); 21 DECLARE_FLAG(bool, inline_alloc);
22 22
23 23
24 Assembler::Assembler(bool use_far_branches) 24 Assembler::Assembler(bool use_far_branches)
25 : buffer_(), 25 : buffer_(),
26 prologue_offset_(-1), 26 prologue_offset_(-1),
27 comments_(), 27 comments_(),
28 constant_pool_allowed_(true) { 28 constant_pool_allowed_(false) {
29 // Far branching mode is only needed and implemented for MIPS and ARM. 29 // Far branching mode is only needed and implemented for MIPS and ARM.
30 ASSERT(!use_far_branches); 30 ASSERT(!use_far_branches);
31 } 31 }
32 32
33 33
34 void Assembler::InitializeMemoryWithBreakpoints(uword data, intptr_t length) { 34 void Assembler::InitializeMemoryWithBreakpoints(uword data, intptr_t length) {
35 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); 35 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length);
36 } 36 }
37 37
38 38
(...skipping 17 matching lines...)
56 void Assembler::call(Label* label) { 56 void Assembler::call(Label* label) {
57 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 57 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
58 static const int kSize = 5; 58 static const int kSize = 5;
59 EmitUint8(0xE8); 59 EmitUint8(0xE8);
60 EmitLabel(label, kSize); 60 EmitLabel(label, kSize);
61 } 61 }
62 62
63 63
64 void Assembler::LoadExternalLabel(Register dst, 64 void Assembler::LoadExternalLabel(Register dst,
65 const ExternalLabel* label, 65 const ExternalLabel* label,
66 Patchability patchable, 66 Patchability patchable) {
67 Register pp) {
68 const int32_t offset = ObjectPool::element_offset( 67 const int32_t offset = ObjectPool::element_offset(
69 object_pool_wrapper_.FindExternalLabel(label, patchable)); 68 object_pool_wrapper_.FindExternalLabel(label, patchable));
70 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); 69 LoadWordFromPoolOffset(dst, offset - kHeapObjectTag);
71 } 70 }
72 71
73 72
74 void Assembler::call(const ExternalLabel* label) { 73 void Assembler::call(const ExternalLabel* label) {
75 { // Encode movq(TMP, Immediate(label->address())), but always as imm64. 74 { // Encode movq(TMP, Immediate(label->address())), but always as imm64.
76 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 75 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
77 EmitRegisterREX(TMP, REX_W); 76 EmitRegisterREX(TMP, REX_W);
78 EmitUint8(0xB8 | (TMP & 7)); 77 EmitUint8(0xB8 | (TMP & 7));
79 EmitInt64(label->address()); 78 EmitInt64(label->address());
80 } 79 }
81 call(TMP); 80 call(TMP);
82 } 81 }
83 82
84 83
85 void Assembler::CallPatchable(const ExternalLabel* label) { 84 void Assembler::CallPatchable(const ExternalLabel* label) {
86 ASSERT(constant_pool_allowed()); 85 ASSERT(constant_pool_allowed());
87 intptr_t call_start = buffer_.GetPosition(); 86 intptr_t call_start = buffer_.GetPosition();
88 const int32_t offset = ObjectPool::element_offset( 87 const int32_t offset = ObjectPool::element_offset(
89 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); 88 object_pool_wrapper_.FindExternalLabel(label, kPatchable));
90 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); 89 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
91 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); 90 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
92 } 91 }
93 92
94 93
95 void Assembler::Call(const ExternalLabel* label, Register pp) { 94 void Assembler::Call(const ExternalLabel* label) {
95 ASSERT(constant_pool_allowed());
96 const int32_t offset = ObjectPool::element_offset( 96 const int32_t offset = ObjectPool::element_offset(
97 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); 97 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
98 call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); 98 call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
99 } 99 }
100 100
101 101
102 void Assembler::pushq(Register reg) { 102 void Assembler::pushq(Register reg) {
103 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 103 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
104 EmitRegisterREX(reg, REX_NONE); 104 EmitRegisterREX(reg, REX_NONE);
105 EmitUint8(0x50 | (reg & 7)); 105 EmitUint8(0x50 | (reg & 7));
106 } 106 }
107 107
108 108
(...skipping 10 matching lines...)
119 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 119 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
120 EmitUint8(0x68); 120 EmitUint8(0x68);
121 EmitImmediate(imm); 121 EmitImmediate(imm);
122 } else { 122 } else {
123 movq(TMP, imm); 123 movq(TMP, imm);
124 pushq(TMP); 124 pushq(TMP);
125 } 125 }
126 } 126 }
127 127
128 128
129 void Assembler::PushImmediate(const Immediate& imm, Register pp) { 129 void Assembler::PushImmediate(const Immediate& imm) {
130 if (CanLoadImmediateFromPool(imm, pp)) { 130 if (imm.is_int32()) {
131 LoadImmediate(TMP, imm, pp); 131 pushq(imm);
132 } else {
133 LoadImmediate(TMP, imm);
132 pushq(TMP); 134 pushq(TMP);
133 } else {
134 pushq(imm);
135 } 135 }
136 } 136 }
137 137
138 138
139 void Assembler::popq(Register reg) { 139 void Assembler::popq(Register reg) {
140 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 140 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
141 EmitRegisterREX(reg, REX_NONE); 141 EmitRegisterREX(reg, REX_NONE);
142 EmitUint8(0x58 | (reg & 7)); 142 EmitUint8(0x58 | (reg & 7));
143 } 143 }
144 144
(...skipping 630 matching lines...)
775 775
776 void Assembler::notps(XmmRegister dst) { 776 void Assembler::notps(XmmRegister dst) {
777 static const struct ALIGN16 { 777 static const struct ALIGN16 {
778 uint32_t a; 778 uint32_t a;
779 uint32_t b; 779 uint32_t b;
780 uint32_t c; 780 uint32_t c;
781 uint32_t d; 781 uint32_t d;
782 } float_not_constant = 782 } float_not_constant =
783 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; 783 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
784 LoadImmediate( 784 LoadImmediate(
785 TMP, Immediate(reinterpret_cast<intptr_t>(&float_not_constant)), PP); 785 TMP, Immediate(reinterpret_cast<intptr_t>(&float_not_constant)));
786 xorps(dst, Address(TMP, 0)); 786 xorps(dst, Address(TMP, 0));
787 } 787 }
788 788
789 789
790 void Assembler::negateps(XmmRegister dst) { 790 void Assembler::negateps(XmmRegister dst) {
791 static const struct ALIGN16 { 791 static const struct ALIGN16 {
792 uint32_t a; 792 uint32_t a;
793 uint32_t b; 793 uint32_t b;
794 uint32_t c; 794 uint32_t c;
795 uint32_t d; 795 uint32_t d;
796 } float_negate_constant = 796 } float_negate_constant =
797 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 }; 797 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 };
798 LoadImmediate( 798 LoadImmediate(
799 TMP, Immediate(reinterpret_cast<intptr_t>(&float_negate_constant)), PP); 799 TMP, Immediate(reinterpret_cast<intptr_t>(&float_negate_constant)));
800 xorps(dst, Address(TMP, 0)); 800 xorps(dst, Address(TMP, 0));
801 } 801 }
802 802
803 803
804 void Assembler::absps(XmmRegister dst) { 804 void Assembler::absps(XmmRegister dst) {
805 static const struct ALIGN16 { 805 static const struct ALIGN16 {
806 uint32_t a; 806 uint32_t a;
807 uint32_t b; 807 uint32_t b;
808 uint32_t c; 808 uint32_t c;
809 uint32_t d; 809 uint32_t d;
810 } float_absolute_constant = 810 } float_absolute_constant =
811 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF }; 811 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF };
812 LoadImmediate( 812 LoadImmediate(
813 TMP, Immediate(reinterpret_cast<intptr_t>(&float_absolute_constant)), PP); 813 TMP, Immediate(reinterpret_cast<intptr_t>(&float_absolute_constant)));
814 andps(dst, Address(TMP, 0)); 814 andps(dst, Address(TMP, 0));
815 } 815 }
816 816
817 817
818 void Assembler::zerowps(XmmRegister dst) { 818 void Assembler::zerowps(XmmRegister dst) {
819 static const struct ALIGN16 { 819 static const struct ALIGN16 {
820 uint32_t a; 820 uint32_t a;
821 uint32_t b; 821 uint32_t b;
822 uint32_t c; 822 uint32_t c;
823 uint32_t d; 823 uint32_t d;
824 } float_zerow_constant = 824 } float_zerow_constant =
825 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 }; 825 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 };
826 LoadImmediate( 826 LoadImmediate(
827 TMP, Immediate(reinterpret_cast<intptr_t>(&float_zerow_constant)), PP); 827 TMP, Immediate(reinterpret_cast<intptr_t>(&float_zerow_constant)));
828 andps(dst, Address(TMP, 0)); 828 andps(dst, Address(TMP, 0));
829 } 829 }
830 830
831 831
832 void Assembler::cmppseq(XmmRegister dst, XmmRegister src) { 832 void Assembler::cmppseq(XmmRegister dst, XmmRegister src) {
833 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 833 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
834 EmitREX_RB(dst, src); 834 EmitREX_RB(dst, src);
835 EmitUint8(0x0F); 835 EmitUint8(0x0F);
836 EmitUint8(0xC2); 836 EmitUint8(0xC2);
837 EmitXmmRegisterOperand(dst & 7, src); 837 EmitXmmRegisterOperand(dst & 7, src);
(...skipping 167 matching lines...)
1005 } 1005 }
1006 1006
1007 1007
1008 void Assembler::negatepd(XmmRegister dst) { 1008 void Assembler::negatepd(XmmRegister dst) {
1009 static const struct ALIGN16 { 1009 static const struct ALIGN16 {
1010 uint64_t a; 1010 uint64_t a;
1011 uint64_t b; 1011 uint64_t b;
1012 } double_negate_constant = 1012 } double_negate_constant =
1013 { 0x8000000000000000LL, 0x8000000000000000LL }; 1013 { 0x8000000000000000LL, 0x8000000000000000LL };
1014 LoadImmediate( 1014 LoadImmediate(
1015 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)), PP); 1015 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)));
1016 xorpd(dst, Address(TMP, 0)); 1016 xorpd(dst, Address(TMP, 0));
1017 } 1017 }
1018 1018
1019 1019
1020 void Assembler::subpd(XmmRegister dst, XmmRegister src) { 1020 void Assembler::subpd(XmmRegister dst, XmmRegister src) {
1021 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1021 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1022 ASSERT(src <= XMM15); 1022 ASSERT(src <= XMM15);
1023 ASSERT(dst <= XMM15); 1023 ASSERT(dst <= XMM15);
1024 EmitUint8(0x66); 1024 EmitUint8(0x66);
1025 EmitREX_RB(dst, src); 1025 EmitREX_RB(dst, src);
(...skipping 27 matching lines...)
1053 } 1053 }
1054 1054
1055 1055
1056 void Assembler::abspd(XmmRegister dst) { 1056 void Assembler::abspd(XmmRegister dst) {
1057 static const struct ALIGN16 { 1057 static const struct ALIGN16 {
1058 uint64_t a; 1058 uint64_t a;
1059 uint64_t b; 1059 uint64_t b;
1060 } double_absolute_const = 1060 } double_absolute_const =
1061 { 0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL }; 1061 { 0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL };
1062 LoadImmediate( 1062 LoadImmediate(
1063 TMP, Immediate(reinterpret_cast<intptr_t>(&double_absolute_const)), PP); 1063 TMP, Immediate(reinterpret_cast<intptr_t>(&double_absolute_const)));
1064 andpd(dst, Address(TMP, 0)); 1064 andpd(dst, Address(TMP, 0));
1065 } 1065 }
1066 1066
1067 1067
1068 void Assembler::minpd(XmmRegister dst, XmmRegister src) { 1068 void Assembler::minpd(XmmRegister dst, XmmRegister src) {
1069 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1069 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1070 ASSERT(src <= XMM15); 1070 ASSERT(src <= XMM15);
1071 ASSERT(dst <= XMM15); 1071 ASSERT(dst <= XMM15);
1072 EmitUint8(0x66); 1072 EmitUint8(0x66);
1073 EmitREX_RB(dst, src); 1073 EmitREX_RB(dst, src);
(...skipping 392 matching lines...)
1466 } 1466 }
1467 } 1467 }
1468 1468
1469 1469
1470 void Assembler::cmpq(Register reg, const Immediate& imm) { 1470 void Assembler::cmpq(Register reg, const Immediate& imm) {
1471 if (imm.is_int32()) { 1471 if (imm.is_int32()) {
1472 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1472 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1473 EmitRegisterREX(reg, REX_W); 1473 EmitRegisterREX(reg, REX_W);
1474 EmitComplex(7, Operand(reg), imm); 1474 EmitComplex(7, Operand(reg), imm);
1475 } else { 1475 } else {
1476 ASSERT(reg != TMP);
1476 movq(TMP, imm); 1477 movq(TMP, imm);
1477 cmpq(reg, TMP); 1478 cmpq(reg, TMP);
1478 } 1479 }
1479 } 1480 }
1480 1481
1481 1482
1482 void Assembler::cmpq(Register reg0, Register reg1) { 1483 void Assembler::cmpq(Register reg0, Register reg1) {
1483 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1484 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1484 Operand operand(reg1); 1485 Operand operand(reg1);
1485 EmitOperandREX(reg0, operand, REX_W); 1486 EmitOperandREX(reg0, operand, REX_W);
1486 EmitUint8(0x3B); 1487 EmitUint8(0x3B);
1487 EmitOperand(reg0 & 7, operand); 1488 EmitOperand(reg0 & 7, operand);
1488 } 1489 }
1489 1490
1490 1491
1491 void Assembler::cmpq(Register reg, const Address& address) { 1492 void Assembler::cmpq(Register reg, const Address& address) {
1492 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1493 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1493 EmitOperandREX(reg, address, REX_W); 1494 EmitOperandREX(reg, address, REX_W);
1494 EmitUint8(0x3B); 1495 EmitUint8(0x3B);
1495 EmitOperand(reg & 7, address); 1496 EmitOperand(reg & 7, address);
1496 } 1497 }
1497 1498
1498 1499
1499 void Assembler::CompareImmediate(Register reg, const Immediate& imm, 1500 void Assembler::CompareImmediate(Register reg, const Immediate& imm) {
1500 Register pp) { 1501 if (imm.is_int32()) {
1501 if (CanLoadImmediateFromPool(imm, pp)) { 1502 cmpq(reg, imm);
1502 LoadImmediate(TMP, imm, pp); 1503 } else {
1504 ASSERT(reg != TMP);
1505 LoadImmediate(TMP, imm);
1503 cmpq(reg, TMP); 1506 cmpq(reg, TMP);
1504 } else {
1505 cmpq(reg, imm);
1506 } 1507 }
1507 } 1508 }
1508 1509
1509 1510
1510 void Assembler::CompareImmediate(const Address& address, const Immediate& imm, 1511 void Assembler::CompareImmediate(const Address& address, const Immediate& imm) {
1511 Register pp) { 1512 if (imm.is_int32()) {
1512 if (CanLoadImmediateFromPool(imm, pp)) { 1513 cmpq(address, imm);
1513 LoadImmediate(TMP, imm, pp); 1514 } else {
1515 LoadImmediate(TMP, imm);
1514 cmpq(address, TMP); 1516 cmpq(address, TMP);
1515 } else {
1516 cmpq(address, imm);
1517 } 1517 }
1518 } 1518 }
1519 1519
1520 1520
1521 void Assembler::testl(Register reg1, Register reg2) { 1521 void Assembler::testl(Register reg1, Register reg2) {
1522 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1522 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1523 Operand operand(reg2); 1523 Operand operand(reg2);
1524 EmitOperandREX(reg1, operand, REX_NONE); 1524 EmitOperandREX(reg1, operand, REX_NONE);
1525 EmitUint8(0x85); 1525 EmitUint8(0x85);
1526 EmitOperand(reg1 & 7, operand); 1526 EmitOperand(reg1 & 7, operand);
(...skipping 74 matching lines...)
1601 } else { 1601 } else {
1602 EmitRegisterREX(reg, REX_W); 1602 EmitRegisterREX(reg, REX_W);
1603 EmitUint8(0xF7); 1603 EmitUint8(0xF7);
1604 EmitUint8(0xC0 | (reg & 7)); 1604 EmitUint8(0xC0 | (reg & 7));
1605 } 1605 }
1606 EmitImmediate(imm); 1606 EmitImmediate(imm);
1607 } 1607 }
1608 } 1608 }
1609 1609
1610 1610
1611 void Assembler::TestImmediate(Register dst, const Immediate& imm, Register pp) { 1611 void Assembler::TestImmediate(Register dst, const Immediate& imm) {
1612 if (CanLoadImmediateFromPool(imm, pp)) { 1612 if (imm.is_int32()) {
1613 testq(dst, imm);
1614 } else {
1613 ASSERT(dst != TMP); 1615 ASSERT(dst != TMP);
1614 LoadImmediate(TMP, imm, pp); 1616 LoadImmediate(TMP, imm);
1615 testq(dst, TMP); 1617 testq(dst, TMP);
1616 } else {
1617 testq(dst, imm);
1618 } 1618 }
1619 } 1619 }
1620 1620
1621 1621
1622 void Assembler::andl(Register dst, Register src) { 1622 void Assembler::andl(Register dst, Register src) {
1623 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1623 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1624 Operand operand(src); 1624 Operand operand(src);
1625 EmitOperandREX(dst, operand, REX_NONE); 1625 EmitOperandREX(dst, operand, REX_NONE);
1626 EmitUint8(0x23); 1626 EmitUint8(0x23);
1627 EmitOperand(dst & 7, operand); 1627 EmitOperand(dst & 7, operand);
(...skipping 56 matching lines...)
1684 EmitOperand(dst & 7, address); 1684 EmitOperand(dst & 7, address);
1685 } 1685 }
1686 1686
1687 1687
1688 void Assembler::andq(Register dst, const Immediate& imm) { 1688 void Assembler::andq(Register dst, const Immediate& imm) {
1689 if (imm.is_int32()) { 1689 if (imm.is_int32()) {
1690 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1690 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1691 EmitRegisterREX(dst, REX_W); 1691 EmitRegisterREX(dst, REX_W);
1692 EmitComplex(4, Operand(dst), imm); 1692 EmitComplex(4, Operand(dst), imm);
1693 } else { 1693 } else {
1694 ASSERT(dst != TMP);
1694 movq(TMP, imm); 1695 movq(TMP, imm);
1695 andq(dst, TMP); 1696 andq(dst, TMP);
1696 } 1697 }
1697 } 1698 }
1698 1699
1699 1700
1700 void Assembler::AndImmediate(Register dst, const Immediate& imm, Register pp) { 1701 void Assembler::AndImmediate(Register dst, const Immediate& imm) {
1701 if (CanLoadImmediateFromPool(imm, pp)) { 1702 if (imm.is_int32()) {
1703 andq(dst, imm);
1704 } else {
1702 ASSERT(dst != TMP); 1705 ASSERT(dst != TMP);
1703 LoadImmediate(TMP, imm, pp); 1706 LoadImmediate(TMP, imm);
1704 andq(dst, TMP); 1707 andq(dst, TMP);
1705 } else {
1706 andq(dst, imm);
1707 } 1708 }
1708 } 1709 }
1709 1710
1710 1711
1711 void Assembler::orq(Register dst, Register src) { 1712 void Assembler::orq(Register dst, Register src) {
1712 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1713 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1713 Operand operand(src); 1714 Operand operand(src);
1714 EmitOperandREX(dst, operand, REX_W); 1715 EmitOperandREX(dst, operand, REX_W);
1715 EmitUint8(0x0B); 1716 EmitUint8(0x0B);
1716 EmitOperand(dst & 7, operand); 1717 EmitOperand(dst & 7, operand);
1717 } 1718 }
1718 1719
1719 1720
1720 void Assembler::orq(Register dst, const Address& address) { 1721 void Assembler::orq(Register dst, const Address& address) {
1721 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1722 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1722 EmitOperandREX(dst, address, REX_W); 1723 EmitOperandREX(dst, address, REX_W);
1723 EmitUint8(0x0B); 1724 EmitUint8(0x0B);
1724 EmitOperand(dst & 7, address); 1725 EmitOperand(dst & 7, address);
1725 } 1726 }
1726 1727
1727 1728
1728 void Assembler::orq(Register dst, const Immediate& imm) { 1729 void Assembler::orq(Register dst, const Immediate& imm) {
1729 if (imm.is_int32()) { 1730 if (imm.is_int32()) {
1730 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1731 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1731 EmitRegisterREX(dst, REX_W); 1732 EmitRegisterREX(dst, REX_W);
1732 EmitComplex(1, Operand(dst), imm); 1733 EmitComplex(1, Operand(dst), imm);
1733 } else { 1734 } else {
1735 ASSERT(dst != TMP);
1734 movq(TMP, imm); 1736 movq(TMP, imm);
1735 orq(dst, TMP); 1737 orq(dst, TMP);
1736 } 1738 }
1737 } 1739 }
1738 1740
1739 1741
1740 void Assembler::OrImmediate(Register dst, const Immediate& imm, Register pp) { 1742 void Assembler::OrImmediate(Register dst, const Immediate& imm) {
1741 if (CanLoadImmediateFromPool(imm, pp)) { 1743 if (imm.is_int32()) {
1744 orq(dst, imm);
1745 } else {
1742 ASSERT(dst != TMP); 1746 ASSERT(dst != TMP);
1743 LoadImmediate(TMP, imm, pp); 1747 LoadImmediate(TMP, imm);
1744 orq(dst, TMP); 1748 orq(dst, TMP);
1745 } else {
1746 orq(dst, imm);
1747 } 1749 }
1748 } 1750 }
1749 1751
1750 1752
1751 void Assembler::xorq(Register dst, Register src) { 1753 void Assembler::xorq(Register dst, Register src) {
1752 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1754 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1753 Operand operand(src); 1755 Operand operand(src);
1754 EmitOperandREX(dst, operand, REX_W); 1756 EmitOperandREX(dst, operand, REX_W);
1755 EmitUint8(0x33); 1757 EmitUint8(0x33);
1756 EmitOperand(dst & 7, operand); 1758 EmitOperand(dst & 7, operand);
(...skipping 15 matching lines...)
1772 EmitOperand(src & 7, dst); 1774 EmitOperand(src & 7, dst);
1773 } 1775 }
1774 1776
1775 1777
1776 void Assembler::xorq(Register dst, const Immediate& imm) { 1778 void Assembler::xorq(Register dst, const Immediate& imm) {
1777 if (imm.is_int32()) { 1779 if (imm.is_int32()) {
1778 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1780 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1779 EmitRegisterREX(dst, REX_W); 1781 EmitRegisterREX(dst, REX_W);
1780 EmitComplex(6, Operand(dst), imm); 1782 EmitComplex(6, Operand(dst), imm);
1781 } else { 1783 } else {
1784 ASSERT(dst != TMP);
1782 movq(TMP, imm); 1785 movq(TMP, imm);
1783 xorq(dst, TMP); 1786 xorq(dst, TMP);
1784 } 1787 }
1785 } 1788 }
1786 1789
1787 1790
1788 void Assembler::XorImmediate(Register dst, const Immediate& imm, Register pp) { 1791 void Assembler::XorImmediate(Register dst, const Immediate& imm) {
1789 if (CanLoadImmediateFromPool(imm, pp)) { 1792 if (imm.is_int32()) {
1793 xorq(dst, imm);
1794 } else {
1790 ASSERT(dst != TMP); 1795 ASSERT(dst != TMP);
1791 LoadImmediate(TMP, imm, pp); 1796 LoadImmediate(TMP, imm);
1792 xorq(dst, TMP); 1797 xorq(dst, TMP);
1793 } else {
1794 xorq(dst, imm);
1795 } 1798 }
1796 } 1799 }
1797 1800
1798 1801
1799 void Assembler::addl(Register dst, Register src) { 1802 void Assembler::addl(Register dst, Register src) {
1800 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1803 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1801 Operand operand(src); 1804 Operand operand(src);
1802 EmitOperandREX(dst, operand, REX_NONE); 1805 EmitOperandREX(dst, operand, REX_NONE);
1803 EmitUint8(0x03); 1806 EmitUint8(0x03);
1804 EmitOperand(dst & 7, operand); 1807 EmitOperand(dst & 7, operand);
(...skipping 65 matching lines...)
1870 EmitOperand(dst & 7, address); 1873 EmitOperand(dst & 7, address);
1871 } 1874 }
1872 1875
1873 1876
1874 void Assembler::addq(Register dst, const Immediate& imm) { 1877 void Assembler::addq(Register dst, const Immediate& imm) {
1875 if (imm.is_int32()) { 1878 if (imm.is_int32()) {
1876 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1879 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1877 EmitRegisterREX(dst, REX_W); 1880 EmitRegisterREX(dst, REX_W);
1878 EmitComplex(0, Operand(dst), imm); 1881 EmitComplex(0, Operand(dst), imm);
1879 } else { 1882 } else {
1883 ASSERT(dst != TMP);
1880 movq(TMP, imm); 1884 movq(TMP, imm);
1881 addq(dst, TMP); 1885 addq(dst, TMP);
1882 } 1886 }
1883 } 1887 }
1884 1888
1885 1889
1886 void Assembler::addq(const Address& address, const Immediate& imm) { 1890 void Assembler::addq(const Address& address, const Immediate& imm) {
1887 if (imm.is_int32()) { 1891 if (imm.is_int32()) {
1888 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1892 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1889 EmitOperandREX(0, address, REX_W); 1893 EmitOperandREX(0, address, REX_W);
(...skipping 21 matching lines...)
1911 EmitOperand(dst & 7, operand); 1915 EmitOperand(dst & 7, operand);
1912 } 1916 }
1913 1917
1914 1918
1915 void Assembler::adcq(Register dst, const Immediate& imm) { 1919 void Assembler::adcq(Register dst, const Immediate& imm) {
1916 if (imm.is_int32()) { 1920 if (imm.is_int32()) {
1917 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1921 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1918 EmitRegisterREX(dst, REX_W); 1922 EmitRegisterREX(dst, REX_W);
1919 EmitComplex(2, Operand(dst), imm); 1923 EmitComplex(2, Operand(dst), imm);
1920 } else { 1924 } else {
1925 ASSERT(dst != TMP);
1921 movq(TMP, imm); 1926 movq(TMP, imm);
1922 adcq(dst, TMP); 1927 adcq(dst, TMP);
1923 } 1928 }
1924 } 1929 }
1925 1930
1926 1931
1927 void Assembler::adcq(Register dst, const Address& address) { 1932 void Assembler::adcq(Register dst, const Address& address) {
1928 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1933 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1929 EmitOperandREX(dst, address, REX_W); 1934 EmitOperandREX(dst, address, REX_W);
1930 EmitUint8(0x13); 1935 EmitUint8(0x13);
(...skipping 136 matching lines...)
2067 2072
2068 void Assembler::imulq(Register reg, const Immediate& imm) { 2073 void Assembler::imulq(Register reg, const Immediate& imm) {
2069 if (imm.is_int32()) { 2074 if (imm.is_int32()) {
2070 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2075 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2071 Operand operand(reg); 2076 Operand operand(reg);
2072 EmitOperandREX(reg, operand, REX_W); 2077 EmitOperandREX(reg, operand, REX_W);
2073 EmitUint8(0x69); 2078 EmitUint8(0x69);
2074 EmitOperand(reg & 7, Operand(reg)); 2079 EmitOperand(reg & 7, Operand(reg));
2075 EmitImmediate(imm); 2080 EmitImmediate(imm);
2076 } else { 2081 } else {
2082 ASSERT(reg != TMP);
2077 movq(TMP, imm); 2083 movq(TMP, imm);
2078 imulq(reg, TMP); 2084 imulq(reg, TMP);
2079 } 2085 }
2080 } 2086 }
2081 2087
2082 2088
2083 void Assembler::MulImmediate(Register reg, const Immediate& imm, Register pp) { 2089 void Assembler::MulImmediate(Register reg, const Immediate& imm) {
2084 if (CanLoadImmediateFromPool(imm, pp)) { 2090 if (imm.is_int32()) {
2091 imulq(reg, imm);
2092 } else {
2085 ASSERT(reg != TMP); 2093 ASSERT(reg != TMP);
2086 LoadImmediate(TMP, imm, pp); 2094 LoadImmediate(TMP, imm);
2087 imulq(reg, TMP); 2095 imulq(reg, TMP);
2088 } else {
2089 imulq(reg, imm);
2090 } 2096 }
2091 } 2097 }
2092 2098
2093 2099
2094 void Assembler::imulq(Register dst, const Address& address) { 2100 void Assembler::imulq(Register dst, const Address& address) {
2095 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2101 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2096 EmitOperandREX(dst, address, REX_W); 2102 EmitOperandREX(dst, address, REX_W);
2097 EmitUint8(0x0F); 2103 EmitUint8(0x0F);
2098 EmitUint8(0xAF); 2104 EmitUint8(0xAF);
2099 EmitOperand(dst & 7, address); 2105 EmitOperand(dst & 7, address);
(...skipping 16 matching lines...)
2116 EmitOperand(dst & 7, operand); 2122 EmitOperand(dst & 7, operand);
2117 } 2123 }
2118 2124
2119 2125
2120 void Assembler::subq(Register reg, const Immediate& imm) { 2126 void Assembler::subq(Register reg, const Immediate& imm) {
2121 if (imm.is_int32()) { 2127 if (imm.is_int32()) {
2122 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2128 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2123 EmitRegisterREX(reg, REX_W); 2129 EmitRegisterREX(reg, REX_W);
2124 EmitComplex(5, Operand(reg), imm); 2130 EmitComplex(5, Operand(reg), imm);
2125 } else { 2131 } else {
2132 ASSERT(reg != TMP);
2126 movq(TMP, imm); 2133 movq(TMP, imm);
2127 subq(reg, TMP); 2134 subq(reg, TMP);
2128 } 2135 }
2129 } 2136 }
2130 2137
2131 2138
2132 void Assembler::subq(Register reg, const Address& address) { 2139 void Assembler::subq(Register reg, const Address& address) {
2133 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2140 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2134 EmitOperandREX(reg, address, REX_W); 2141 EmitOperandREX(reg, address, REX_W);
2135 EmitUint8(0x2B); 2142 EmitUint8(0x2B);
(...skipping 29 matching lines...)
2165 EmitOperand(dst & 7, operand); 2172 EmitOperand(dst & 7, operand);
2166 } 2173 }
2167 2174
2168 2175
2169 void Assembler::sbbq(Register dst, const Immediate& imm) { 2176 void Assembler::sbbq(Register dst, const Immediate& imm) {
2170 if (imm.is_int32()) { 2177 if (imm.is_int32()) {
2171 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2178 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2172 EmitRegisterREX(dst, REX_W); 2179 EmitRegisterREX(dst, REX_W);
2173 EmitComplex(3, Operand(dst), imm); 2180 EmitComplex(3, Operand(dst), imm);
2174 } else { 2181 } else {
2182 ASSERT(dst != TMP);
2175 movq(TMP, imm); 2183 movq(TMP, imm);
2176 sbbq(dst, TMP); 2184 sbbq(dst, TMP);
2177 } 2185 }
2178 } 2186 }
2179 2187
2180 2188
2181 void Assembler::sbbq(Register dst, const Address& address) { 2189 void Assembler::sbbq(Register dst, const Address& address) {
2182 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2190 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2183 EmitOperandREX(dst, address, REX_W); 2191 EmitOperandREX(dst, address, REX_W);
2184 EmitUint8(0x1B); 2192 EmitUint8(0x1B);
(...skipping 407 matching lines...)
2592 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2600 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2593 EmitRegisterREX(TMP, REX_W); 2601 EmitRegisterREX(TMP, REX_W);
2594 EmitUint8(0xB8 | (TMP & 7)); 2602 EmitUint8(0xB8 | (TMP & 7));
2595 EmitInt64(label->address()); 2603 EmitInt64(label->address());
2596 } 2604 }
2597 jmp(TMP); 2605 jmp(TMP);
2598 } 2606 }
2599 2607
2600 2608
2601 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { 2609 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
2602 ASSERT(constant_pool_allowed()); 2610 ASSERT((pp != PP) || constant_pool_allowed());
2603 intptr_t call_start = buffer_.GetPosition(); 2611 intptr_t call_start = buffer_.GetPosition();
2604 const int32_t offset = ObjectPool::element_offset( 2612 const int32_t offset = ObjectPool::element_offset(
2605 object_pool_wrapper_.FindExternalLabel(label, kPatchable)); 2613 object_pool_wrapper_.FindExternalLabel(label, kPatchable));
2606 // Patchable jumps always use a 32-bit immediate encoding. 2614 // Patchable jumps always use a 32-bit immediate encoding.
2607 jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); 2615 jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
2608 ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes); 2616 ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes);
2609 } 2617 }
2610 2618
2611 2619
2612 void Assembler::Jmp(const ExternalLabel* label, Register pp) { 2620 void Assembler::Jmp(const ExternalLabel* label, Register pp) {
2621 ASSERT((pp != PP) || constant_pool_allowed());
2613 const int32_t offset = ObjectPool::element_offset( 2622 const int32_t offset = ObjectPool::element_offset(
2614 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); 2623 object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
2615 jmp(Address(pp, offset - kHeapObjectTag)); 2624 jmp(Address(pp, offset - kHeapObjectTag));
2616 } 2625 }
2617 2626
2618 2627
2619 void Assembler::lock() { 2628 void Assembler::lock() {
2620 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2629 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2621 EmitUint8(0xF0); 2630 EmitUint8(0xF0);
2622 } 2631 }
(...skipping 34 matching lines...)
2657 movq(to, from); 2666 movq(to, from);
2658 } 2667 }
2659 } 2668 }
2660 2669
2661 2670
2662 void Assembler::PopRegister(Register r) { 2671 void Assembler::PopRegister(Register r) {
2663 popq(r); 2672 popq(r);
2664 } 2673 }
2665 2674
2666 2675
2667 void Assembler::AddImmediate(Register reg, const Immediate& imm, Register pp) { 2676 void Assembler::AddImmediate(Register reg, const Immediate& imm) {
2668 const int64_t value = imm.value(); 2677 const int64_t value = imm.value();
2669 if (value == 0) { 2678 if (value == 0) {
2670 return; 2679 return;
2671 } 2680 }
2672 if ((value > 0) || (value == kMinInt64)) { 2681 if ((value > 0) || (value == kMinInt64)) {
2673 if (value == 1) { 2682 if (value == 1) {
2674 incq(reg); 2683 incq(reg);
2675 } else { 2684 } else {
2676 if (CanLoadImmediateFromPool(imm, pp)) { 2685 if (imm.is_int32()) {
2686 addq(reg, imm);
2687 } else {
2677 ASSERT(reg != TMP); 2688 ASSERT(reg != TMP);
2678 LoadImmediate(TMP, imm, pp); 2689 LoadImmediate(TMP, imm);
2679 addq(reg, TMP); 2690 addq(reg, TMP);
2680 } else {
2681 addq(reg, imm);
2682 } 2691 }
2683 } 2692 }
2684 } else { 2693 } else {
2685 SubImmediate(reg, Immediate(-value), pp); 2694 SubImmediate(reg, Immediate(-value));
2686 } 2695 }
2687 } 2696 }
2688 2697
2689 2698
2690 void Assembler::AddImmediate(const Address& address, const Immediate& imm, 2699 void Assembler::AddImmediate(const Address& address, const Immediate& imm) {
2691 Register pp) {
2692 const int64_t value = imm.value(); 2700 const int64_t value = imm.value();
2693 if (value == 0) { 2701 if (value == 0) {
2694 return; 2702 return;
2695 } 2703 }
2696 if ((value > 0) || (value == kMinInt64)) { 2704 if ((value > 0) || (value == kMinInt64)) {
2697 if (value == 1) { 2705 if (value == 1) {
2698 incq(address); 2706 incq(address);
2699 } else { 2707 } else {
2700 if (CanLoadImmediateFromPool(imm, pp)) { 2708 if (imm.is_int32()) {
2701 LoadImmediate(TMP, imm, pp); 2709 addq(address, imm);
2710 } else {
2711 LoadImmediate(TMP, imm);
2702 addq(address, TMP); 2712 addq(address, TMP);
2703 } else {
2704 addq(address, imm);
2705 } 2713 }
2706 } 2714 }
2707 } else { 2715 } else {
2708 SubImmediate(address, Immediate(-value), pp); 2716 SubImmediate(address, Immediate(-value));
2709 } 2717 }
2710 } 2718 }
2711 2719
2712 2720
2713 void Assembler::SubImmediate(Register reg, const Immediate& imm, Register pp) { 2721 void Assembler::SubImmediate(Register reg, const Immediate& imm) {
2714 const int64_t value = imm.value(); 2722 const int64_t value = imm.value();
2715 if (value == 0) { 2723 if (value == 0) {
2716 return; 2724 return;
2717 } 2725 }
2718 if ((value > 0) || (value == kMinInt64)) { 2726 if ((value > 0) || (value == kMinInt64)) {
2719 if (value == 1) { 2727 if (value == 1) {
2720 decq(reg); 2728 decq(reg);
2721 } else { 2729 } else {
2722 if (CanLoadImmediateFromPool(imm, pp)) { 2730 if (imm.is_int32()) {
2731 subq(reg, imm);
2732 } else {
2723 ASSERT(reg != TMP); 2733 ASSERT(reg != TMP);
2724 LoadImmediate(TMP, imm, pp); 2734 LoadImmediate(TMP, imm);
2725 subq(reg, TMP); 2735 subq(reg, TMP);
2726 } else {
2727 subq(reg, imm);
2728 } 2736 }
2729 } 2737 }
2730 } else { 2738 } else {
2731 AddImmediate(reg, Immediate(-value), pp); 2739 AddImmediate(reg, Immediate(-value));
2732 } 2740 }
2733 } 2741 }
2734 2742
2735 2743
2736 void Assembler::SubImmediate(const Address& address, const Immediate& imm, 2744 void Assembler::SubImmediate(const Address& address, const Immediate& imm) {
2737 Register pp) {
2738 const int64_t value = imm.value(); 2745 const int64_t value = imm.value();
2739 if (value == 0) { 2746 if (value == 0) {
2740 return; 2747 return;
2741 } 2748 }
2742 if ((value > 0) || (value == kMinInt64)) { 2749 if ((value > 0) || (value == kMinInt64)) {
2743 if (value == 1) { 2750 if (value == 1) {
2744 decq(address); 2751 decq(address);
2745 } else { 2752 } else {
2746 if (CanLoadImmediateFromPool(imm, pp)) { 2753 if (imm.is_int32()) {
2747 LoadImmediate(TMP, imm, pp); 2754 subq(address, imm);
2755 } else {
2756 LoadImmediate(TMP, imm);
2748 subq(address, TMP); 2757 subq(address, TMP);
2749 } else {
2750 subq(address, imm);
2751 } 2758 }
2752 } 2759 }
2753 } else { 2760 } else {
2754 AddImmediate(address, Immediate(-value), pp); 2761 AddImmediate(address, Immediate(-value));
2755 } 2762 }
2756 } 2763 }
2757 2764
2758 2765
2759 void Assembler::Drop(intptr_t stack_elements, Register tmp) { 2766 void Assembler::Drop(intptr_t stack_elements, Register tmp) {
2760 ASSERT(stack_elements >= 0); 2767 ASSERT(stack_elements >= 0);
2761 if (stack_elements <= 4) { 2768 if (stack_elements <= 4) {
2762 for (intptr_t i = 0; i < stack_elements; i++) { 2769 for (intptr_t i = 0; i < stack_elements; i++) {
2763 popq(tmp); 2770 popq(tmp);
2764 } 2771 }
(...skipping 15 matching lines...)
2780 // If the raw smi does not fit into a 32-bit signed int, then we'll keep 2787 // If the raw smi does not fit into a 32-bit signed int, then we'll keep
2781 // the raw value in the object pool. 2788 // the raw value in the object pool.
2782 return !Utils::IsInt(32, reinterpret_cast<int64_t>(object.raw())); 2789 return !Utils::IsInt(32, reinterpret_cast<int64_t>(object.raw()));
2783 } 2790 }
2784 ASSERT(object.IsNotTemporaryScopedHandle()); 2791 ASSERT(object.IsNotTemporaryScopedHandle());
2785 ASSERT(object.IsOld()); 2792 ASSERT(object.IsOld());
2786 return true; 2793 return true;
2787 } 2794 }
2788 2795
2789 2796
2790 void Assembler::LoadWordFromPoolOffset(Register dst, Register pp, 2797 void Assembler::LoadWordFromPoolOffset(Register dst, int32_t offset) {
2791 int32_t offset) { 2798 ASSERT(constant_pool_allowed());
2799 ASSERT(dst != PP);
2792 // This sequence must be of fixed size. AddressBaseImm32 2800 // This sequence must be of fixed size. AddressBaseImm32
2793 // forces the address operand to use a fixed-size imm32 encoding. 2801 // forces the address operand to use a fixed-size imm32 encoding.
2794 movq(dst, Address::AddressBaseImm32(pp, offset)); 2802 movq(dst, Address::AddressBaseImm32(PP, offset));
2795 } 2803 }
2796 2804
2797 2805
2798 void Assembler::LoadIsolate(Register dst) { 2806 void Assembler::LoadIsolate(Register dst) {
2799 movq(dst, Address(THR, Thread::isolate_offset())); 2807 movq(dst, Address(THR, Thread::isolate_offset()));
2800 } 2808 }
2801 2809
2802 2810
2803 void Assembler::LoadObjectHelper(Register dst, 2811 void Assembler::LoadObjectHelper(Register dst,
2804 const Object& object, 2812 const Object& object,
2805 Register pp,
2806 bool is_unique) { 2813 bool is_unique) {
2807 if (Thread::CanLoadFromThread(object)) { 2814 if (Thread::CanLoadFromThread(object)) {
2808 movq(dst, Address(THR, Thread::OffsetFromThread(object))); 2815 movq(dst, Address(THR, Thread::OffsetFromThread(object)));
2809 } else if (CanLoadFromObjectPool(object)) { 2816 } else if (CanLoadFromObjectPool(object)) {
2810 const int32_t offset = ObjectPool::element_offset( 2817 const int32_t offset = ObjectPool::element_offset(
2811 is_unique ? object_pool_wrapper_.AddObject(object) 2818 is_unique ? object_pool_wrapper_.AddObject(object)
2812 : object_pool_wrapper_.FindObject(object)); 2819 : object_pool_wrapper_.FindObject(object));
2813 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); 2820 LoadWordFromPoolOffset(dst, offset - kHeapObjectTag);
2814 } else { 2821 } else {
2815 ASSERT(object.IsSmi() || object.InVMHeap()); 2822 ASSERT(object.IsSmi() || object.InVMHeap());
2816 LoadImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); 2823 LoadImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2817 } 2824 }
2818 } 2825 }
2819 2826
2820 2827
2821 void Assembler::LoadObject(Register dst, const Object& object, Register pp) { 2828 void Assembler::LoadFunctionFromCalleePool(Register dst,
2822 LoadObjectHelper(dst, object, pp, false); 2829 const Function& function,
2830 Register new_pp) {
2831 ASSERT(!constant_pool_allowed());
2832 ASSERT(new_pp != PP);
2833 const int32_t offset =
2834 ObjectPool::element_offset(object_pool_wrapper_.FindObject(function));
2835 movq(dst, Address::AddressBaseImm32(new_pp, offset - kHeapObjectTag));
2823 } 2836 }
2824 2837
2825 2838
2826 void Assembler::LoadUniqueObject(Register dst, 2839 void Assembler::LoadObject(Register dst, const Object& object) {
2827 const Object& object, 2840 LoadObjectHelper(dst, object, false);
2828 Register pp) {
2829 LoadObjectHelper(dst, object, pp, true);
2830 } 2841 }
2831 2842
2832 2843
2833 void Assembler::StoreObject(const Address& dst, const Object& object, 2844 void Assembler::LoadUniqueObject(Register dst, const Object& object) {
2834 Register pp) { 2845 LoadObjectHelper(dst, object, true);
2846 }
2847
2848
2849 void Assembler::StoreObject(const Address& dst, const Object& object) {
2835 if (Thread::CanLoadFromThread(object)) { 2850 if (Thread::CanLoadFromThread(object)) {
2836 movq(TMP, Address(THR, Thread::OffsetFromThread(object))); 2851 movq(TMP, Address(THR, Thread::OffsetFromThread(object)));
2837 movq(dst, TMP); 2852 movq(dst, TMP);
2838 } else if (CanLoadFromObjectPool(object)) { 2853 } else if (CanLoadFromObjectPool(object)) {
2839 LoadObject(TMP, object, pp); 2854 LoadObject(TMP, object);
2840 movq(dst, TMP); 2855 movq(dst, TMP);
2841 } else { 2856 } else {
2842 MoveImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); 2857 MoveImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2843 } 2858 }
2844 } 2859 }
2845 2860
2846 2861
2847 void Assembler::PushObject(const Object& object, Register pp) { 2862 void Assembler::PushObject(const Object& object) {
2848 if (Thread::CanLoadFromThread(object)) { 2863 if (Thread::CanLoadFromThread(object)) {
2849 pushq(Address(THR, Thread::OffsetFromThread(object))); 2864 pushq(Address(THR, Thread::OffsetFromThread(object)));
2850 } else if (CanLoadFromObjectPool(object)) { 2865 } else if (CanLoadFromObjectPool(object)) {
2851 LoadObject(TMP, object, pp); 2866 LoadObject(TMP, object);
2852 pushq(TMP); 2867 pushq(TMP);
2853 } else { 2868 } else {
2854 PushImmediate(Immediate(reinterpret_cast<int64_t>(object.raw())), pp); 2869 PushImmediate(Immediate(reinterpret_cast<int64_t>(object.raw())));
2855 } 2870 }
2856 } 2871 }
2857 2872
2858 2873
2859 void Assembler::CompareObject(Register reg, const Object& object, Register pp) { 2874 void Assembler::CompareObject(Register reg, const Object& object) {
2860 if (Thread::CanLoadFromThread(object)) { 2875 if (Thread::CanLoadFromThread(object)) {
2861 cmpq(reg, Address(THR, Thread::OffsetFromThread(object))); 2876 cmpq(reg, Address(THR, Thread::OffsetFromThread(object)));
2862 } else if (CanLoadFromObjectPool(object)) { 2877 } else if (CanLoadFromObjectPool(object)) {
2863 const int32_t offset = 2878 const int32_t offset =
2864 ObjectPool::element_offset(object_pool_wrapper_.FindObject(object)); 2879 ObjectPool::element_offset(object_pool_wrapper_.FindObject(object));
2865 cmpq(reg, Address(pp, offset-kHeapObjectTag)); 2880 cmpq(reg, Address(PP, offset-kHeapObjectTag));
2866 } else { 2881 } else {
2867 CompareImmediate( 2882 CompareImmediate(
2868 reg, Immediate(reinterpret_cast<int64_t>(object.raw())), pp); 2883 reg, Immediate(reinterpret_cast<int64_t>(object.raw())));
2869 } 2884 }
2870 } 2885 }
2871 2886
2872 2887
2873 intptr_t Assembler::FindImmediate(int64_t imm) { 2888 intptr_t Assembler::FindImmediate(int64_t imm) {
2874 return object_pool_wrapper_.FindImmediate(imm); 2889 return object_pool_wrapper_.FindImmediate(imm);
2875 } 2890 }
2876 2891
2877 2892
2878 bool Assembler::CanLoadImmediateFromPool(const Immediate& imm, Register pp) { 2893 void Assembler::LoadImmediate(Register reg, const Immediate& imm) {
2879 if (!constant_pool_allowed()) { 2894 if (imm.is_int32() || !constant_pool_allowed()) {
2880 return false; 2895 movq(reg, imm);
2881 } 2896 } else {
2882 return !imm.is_int32() && (pp != kNoRegister);
2883 }
2884
2885
2886 void Assembler::LoadImmediate(Register reg, const Immediate& imm, Register pp) {
2887 if (CanLoadImmediateFromPool(imm, pp)) {
2888 int32_t offset = ObjectPool::element_offset(FindImmediate(imm.value())); 2897 int32_t offset = ObjectPool::element_offset(FindImmediate(imm.value()));
2889 LoadWordFromPoolOffset(reg, pp, offset - kHeapObjectTag); 2898 LoadWordFromPoolOffset(reg, offset - kHeapObjectTag);
2890 } else {
2891 movq(reg, imm);
2892 } 2899 }
2893 } 2900 }
2894 2901
2895 2902
2896 void Assembler::MoveImmediate(const Address& dst, const Immediate& imm, 2903 void Assembler::MoveImmediate(const Address& dst, const Immediate& imm) {
2897 Register pp) { 2904 if (imm.is_int32()) {
2898 if (CanLoadImmediateFromPool(imm, pp)) { 2905 movq(dst, imm);
2899 LoadImmediate(TMP, imm, pp); 2906 } else {
2907 LoadImmediate(TMP, imm);
2900 movq(dst, TMP); 2908 movq(dst, TMP);
2901 } else {
2902 movq(dst, imm);
2903 } 2909 }
2904 } 2910 }
2905 2911
2906 2912
2907 // Destroys the value register. 2913 // Destroys the value register.
2908 void Assembler::StoreIntoObjectFilterNoSmi(Register object, 2914 void Assembler::StoreIntoObjectFilterNoSmi(Register object,
2909 Register value, 2915 Register value,
2910 Label* no_update) { 2916 Label* no_update) {
2911 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && 2917 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) &&
2912 (kOldObjectAlignmentOffset == 0)); 2918 (kOldObjectAlignmentOffset == 0));
(...skipping 165 matching lines...)
3078 Bind(&done); 3084 Bind(&done);
3079 popq(value); 3085 popq(value);
3080 #endif // defined(DEBUG) 3086 #endif // defined(DEBUG)
3081 // No store buffer update. 3087 // No store buffer update.
3082 } 3088 }
3083 3089
3084 3090
3085 void Assembler::StoreIntoObjectNoBarrier(Register object, 3091 void Assembler::StoreIntoObjectNoBarrier(Register object,
3086 const Address& dest, 3092 const Address& dest,
3087 const Object& value, 3093 const Object& value,
3088 Register pp,
3089 FieldContent old_content) { 3094 FieldContent old_content) {
3090 VerifyHeapWord(dest, old_content); 3095 VerifyHeapWord(dest, old_content);
3091 if (VerifiedMemory::enabled()) { 3096 if (VerifiedMemory::enabled()) {
3092 Register temp = (pp == RCX) ? RDX : RCX; 3097 const Register temp = RCX;
3093 pushq(temp); 3098 pushq(temp);
3094 leaq(temp, dest); 3099 leaq(temp, dest);
3095 StoreObject(Address(temp, 0), value, pp); 3100 StoreObject(Address(temp, 0), value);
3096 StoreObject(Address(temp, VerifiedMemory::offset()), value, pp); 3101 StoreObject(Address(temp, VerifiedMemory::offset()), value);
3097 popq(temp); 3102 popq(temp);
3098 } else { 3103 } else {
3099 StoreObject(dest, value, pp); 3104 StoreObject(dest, value);
3100 } 3105 }
3101 // TODO(koda): Use 'object', verify that generational barrier's not needed. 3106 // TODO(koda): Use 'object', verify that generational barrier's not needed.
3102 } 3107 }
3103 3108
3104 3109
3105 void Assembler::StoreIntoSmiField(const Address& dest, Register value) { 3110 void Assembler::StoreIntoSmiField(const Address& dest, Register value) {
3106 #if defined(DEBUG) 3111 #if defined(DEBUG)
3107 Label done; 3112 Label done;
3108 testq(value, Immediate(kHeapObjectTag)); 3113 testq(value, Immediate(kHeapObjectTag));
3109 j(ZERO, &done); 3114 j(ZERO, &done);
(...skipping 36 matching lines...)
3146 } 3151 }
3147 3152
3148 3153
3149 void Assembler::DoubleNegate(XmmRegister d) { 3154 void Assembler::DoubleNegate(XmmRegister d) {
3150 static const struct ALIGN16 { 3155 static const struct ALIGN16 {
3151 uint64_t a; 3156 uint64_t a;
3152 uint64_t b; 3157 uint64_t b;
3153 } double_negate_constant = 3158 } double_negate_constant =
3154 {0x8000000000000000LL, 0x8000000000000000LL}; 3159 {0x8000000000000000LL, 0x8000000000000000LL};
3155 LoadImmediate( 3160 LoadImmediate(
3156 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)), PP); 3161 TMP, Immediate(reinterpret_cast<intptr_t>(&double_negate_constant)));
3157 xorpd(d, Address(TMP, 0)); 3162 xorpd(d, Address(TMP, 0));
3158 } 3163 }
3159 3164
3160 3165
3161 void Assembler::DoubleAbs(XmmRegister reg) { 3166 void Assembler::DoubleAbs(XmmRegister reg) {
3162 static const struct ALIGN16 { 3167 static const struct ALIGN16 {
3163 uint64_t a; 3168 uint64_t a;
3164 uint64_t b; 3169 uint64_t b;
3165 } double_abs_constant = 3170 } double_abs_constant =
3166 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; 3171 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
3167 LoadImmediate(TMP, 3172 LoadImmediate(TMP,
3168 Immediate(reinterpret_cast<intptr_t>(&double_abs_constant)), PP); 3173 Immediate(reinterpret_cast<intptr_t>(&double_abs_constant)));
3169 andpd(reg, Address(TMP, 0)); 3174 andpd(reg, Address(TMP, 0));
3170 } 3175 }
3171 3176
3172 3177
3173 void Assembler::Stop(const char* message, bool fixed_length_encoding) { 3178 void Assembler::Stop(const char* message, bool fixed_length_encoding) {
3174 int64_t message_address = reinterpret_cast<int64_t>(message); 3179 int64_t message_address = reinterpret_cast<int64_t>(message);
3175 if (FLAG_print_stop_message) { 3180 if (FLAG_print_stop_message) {
3176 pushq(TMP); // Preserve TMP register. 3181 pushq(TMP); // Preserve TMP register.
3177 pushq(RDI); // Preserve RDI register. 3182 pushq(RDI); // Preserve RDI register.
3178 if (fixed_length_encoding) { 3183 if (fixed_length_encoding) {
3179 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 3184 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
3180 EmitRegisterREX(RDI, REX_W); 3185 EmitRegisterREX(RDI, REX_W);
3181 EmitUint8(0xB8 | (RDI & 7)); 3186 EmitUint8(0xB8 | (RDI & 7));
3182 EmitInt64(message_address); 3187 EmitInt64(message_address);
3183 } else { 3188 } else {
3184 LoadImmediate(RDI, Immediate(message_address), PP); 3189 LoadImmediate(RDI, Immediate(message_address));
3185 } 3190 }
3186 call(&StubCode::PrintStopMessageLabel()); 3191 call(&StubCode::PrintStopMessageLabel());
3187 popq(RDI); // Restore RDI register. 3192 popq(RDI); // Restore RDI register.
3188 popq(TMP); // Restore TMP register. 3193 popq(TMP); // Restore TMP register.
3189 } else { 3194 } else {
3190 // Emit the lower half and the higher half of the message address as 3195 // Emit the lower half and the higher half of the message address as
3191 // immediate operands in the test rax instructions. 3196 // immediate operands in the test rax instructions.
3192 testl(RAX, Immediate(Utils::Low32Bits(message_address))); 3197 testl(RAX, Immediate(Utils::Low32Bits(message_address)));
3193 testl(RAX, Immediate(Utils::High32Bits(message_address))); 3198 testl(RAX, Immediate(Utils::High32Bits(message_address)));
3194 } 3199 }
(...skipping 57 matching lines...)
3252 if (OS::ActivationFrameAlignment() > 1) { 3257 if (OS::ActivationFrameAlignment() > 1) {
3253 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); 3258 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
3254 } 3259 }
3255 } 3260 }
3256 3261
3257 3262
3258 void Assembler::PushRegisters(intptr_t cpu_register_set, 3263 void Assembler::PushRegisters(intptr_t cpu_register_set,
3259 intptr_t xmm_register_set) { 3264 intptr_t xmm_register_set) {
3260 const intptr_t xmm_regs_count = RegisterSet::RegisterCount(xmm_register_set); 3265 const intptr_t xmm_regs_count = RegisterSet::RegisterCount(xmm_register_set);
3261 if (xmm_regs_count > 0) { 3266 if (xmm_regs_count > 0) {
3262 AddImmediate(RSP, Immediate(-xmm_regs_count * kFpuRegisterSize), PP); 3267 AddImmediate(RSP, Immediate(-xmm_regs_count * kFpuRegisterSize));
3263 // Store XMM registers with the lowest register number at the lowest 3268 // Store XMM registers with the lowest register number at the lowest
3264 // address. 3269 // address.
3265 intptr_t offset = 0; 3270 intptr_t offset = 0;
3266 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { 3271 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
3267 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); 3272 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
3268 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { 3273 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) {
3269 movups(Address(RSP, offset), xmm_reg); 3274 movups(Address(RSP, offset), xmm_reg);
3270 offset += kFpuRegisterSize; 3275 offset += kFpuRegisterSize;
3271 } 3276 }
3272 } 3277 }
(...skipping 28 matching lines...)
3301 // XMM registers have the lowest register number at the lowest address. 3306 // XMM registers have the lowest register number at the lowest address.
3302 intptr_t offset = 0; 3307 intptr_t offset = 0;
3303 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { 3308 for (intptr_t reg_idx = 0; reg_idx < kNumberOfXmmRegisters; ++reg_idx) {
3304 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); 3309 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx);
3305 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) { 3310 if (RegisterSet::Contains(xmm_register_set, xmm_reg)) {
3306 movups(xmm_reg, Address(RSP, offset)); 3311 movups(xmm_reg, Address(RSP, offset));
3307 offset += kFpuRegisterSize; 3312 offset += kFpuRegisterSize;
3308 } 3313 }
3309 } 3314 }
3310 ASSERT(offset == (xmm_regs_count * kFpuRegisterSize)); 3315 ASSERT(offset == (xmm_regs_count * kFpuRegisterSize));
3311 AddImmediate(RSP, Immediate(offset), PP); 3316 AddImmediate(RSP, Immediate(offset));
3312 } 3317 }
3313 } 3318 }
3314 3319
3315 3320
3316 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { 3321 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) {
3317 EnterFrame(0); 3322 EnterFrame(0);
3318 3323
3319 // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch. 3324 // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch.
3320 PushRegisters(CallingConventions::kVolatileCpuRegisters, 3325 PushRegisters(CallingConventions::kVolatileCpuRegisters,
3321 CallingConventions::kVolatileXmmRegisters); 3326 CallingConventions::kVolatileXmmRegisters);
(...skipping 49 matching lines...)
3371 3376
3372 void Assembler::LoadPoolPointer(Register pp) { 3377 void Assembler::LoadPoolPointer(Register pp) {
3373 // Load new pool pointer. 3378 // Load new pool pointer.
3374 const intptr_t kRIPRelativeMovqSize = 7; 3379 const intptr_t kRIPRelativeMovqSize = 7;
3375 const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize; 3380 const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize;
3376 const intptr_t object_pool_pc_dist = 3381 const intptr_t object_pool_pc_dist =
3377 Instructions::HeaderSize() - Instructions::object_pool_offset(); 3382 Instructions::HeaderSize() - Instructions::object_pool_offset();
3378 movq(pp, Address::AddressRIPRelative( 3383 movq(pp, Address::AddressRIPRelative(
3379 -entry_to_rip_offset - object_pool_pc_dist)); 3384 -entry_to_rip_offset - object_pool_pc_dist));
3380 ASSERT(CodeSize() == entry_to_rip_offset); 3385 ASSERT(CodeSize() == entry_to_rip_offset);
3386 set_constant_pool_allowed(pp == PP);
3381 } 3387 }
3382 3388
3383 3389
3384 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size, 3390 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size,
3385 Register new_pp, 3391 Register new_pp,
3386 Register pc_marker_override) { 3392 Register pc_marker_override) {
3393 ASSERT(!constant_pool_allowed());
3387 EnterFrame(0); 3394 EnterFrame(0);
3388 pushq(pc_marker_override); 3395 pushq(pc_marker_override);
3389 pushq(PP); 3396 pushq(PP);
3390 movq(PP, new_pp); 3397 movq(PP, new_pp);
3398 set_constant_pool_allowed(true);
3391 if (frame_size != 0) { 3399 if (frame_size != 0) {
3392 subq(RSP, Immediate(frame_size)); 3400 subq(RSP, Immediate(frame_size));
3393 } 3401 }
3394 } 3402 }
3395 3403
3396 3404
3397 void Assembler::LeaveDartFrame() { 3405 void Assembler::LeaveDartFrame() {
3406 // LeaveDartFrame is called from stubs (pp disallowed) and from Dart code (pp
3407 // allowed), so there is no point in checking the current value of
3408 // constant_pool_allowed().
3409 set_constant_pool_allowed(false);
3398 // Restore caller's PP register that was pushed in EnterDartFrame. 3410 // Restore caller's PP register that was pushed in EnterDartFrame.
3399 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); 3411 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize)));
3400 LeaveFrame(); 3412 LeaveFrame();
3401 } 3413 }
3402 3414
3403 3415
3404 // On entry to a function compiled for OSR, the caller's frame pointer, the 3416 // On entry to a function compiled for OSR, the caller's frame pointer, the
3405 // stack locals, and any copied parameters are already in place. The frame 3417 // stack locals, and any copied parameters are already in place. The frame
3406 // pointer is already set up. The PC marker is not correct for the 3418 // pointer is already set up. The PC marker is not correct for the
3407 // optimized function and there may be extra space for spill slots to 3419 // optimized function and there may be extra space for spill slots to
3408 // allocate. 3420 // allocate.
3409 void Assembler::EnterOsrFrame(intptr_t extra_size, 3421 void Assembler::EnterOsrFrame(intptr_t extra_size,
3410 Register new_pp, 3422 Register new_pp,
3411 Register pc_marker_override) { 3423 Register pc_marker_override) {
3424 ASSERT(!constant_pool_allowed());
3412 if (prologue_offset_ == -1) { 3425 if (prologue_offset_ == -1) {
3413 Comment("PrologueOffset = %" Pd "", CodeSize()); 3426 Comment("PrologueOffset = %" Pd "", CodeSize());
3414 prologue_offset_ = CodeSize(); 3427 prologue_offset_ = CodeSize();
3415 } 3428 }
3416 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override); 3429 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override);
3417 movq(PP, new_pp); 3430 movq(PP, new_pp);
3431 set_constant_pool_allowed(true);
3418 if (extra_size != 0) { 3432 if (extra_size != 0) {
3419 subq(RSP, Immediate(extra_size)); 3433 subq(RSP, Immediate(extra_size));
3420 } 3434 }
3421 } 3435 }
3422 3436
3423 3437
3424 void Assembler::EnterStubFrame() { 3438 void Assembler::EnterStubFrame() {
3439 set_constant_pool_allowed(false);
3425 EnterFrame(0); 3440 EnterFrame(0);
3426 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. 3441 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames.
3427 pushq(PP); // Save caller's pool pointer 3442 pushq(PP); // Save caller's pool pointer
3428 LoadPoolPointer(PP); 3443 LoadPoolPointer();
3429 } 3444 }
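A minimal pairing sketch (hypothetical stub body; `__` assumed to be the usual `assembler->` shorthand): EnterStubFrame makes PP valid for object-pool loads, and the frame must be torn down with LeaveStubFrame so the caller's PP is restored.

  __ EnterStubFrame();
  // ... stub body: loads through the object pool (PP) are allowed here ...
  __ LeaveStubFrame();
  __ ret();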
3430 3445
3431 3446
3432 void Assembler::LeaveStubFrame() { 3447 void Assembler::LeaveStubFrame() {
3448 set_constant_pool_allowed(false);
3433 // Restore caller's PP register that was pushed in EnterStubFrame. 3449 // Restore caller's PP register that was pushed in EnterStubFrame.
3434 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); 3450 movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize)));
3435 LeaveFrame(); 3451 LeaveFrame();
3436 } 3452 }
3437 3453
3438 3454
3439 void Assembler::MaybeTraceAllocation(intptr_t cid, 3455 void Assembler::MaybeTraceAllocation(intptr_t cid,
3440 Label* trace, 3456 Label* trace,
3441 bool near_jump, 3457 bool near_jump,
3442 bool inline_isolate) { 3458 bool inline_isolate) {
(...skipping 71 matching lines...)
3514 UpdateAllocationStats(cid, space, inline_isolate); 3530 UpdateAllocationStats(cid, space, inline_isolate);
3515 Register temp_reg = TMP; 3531 Register temp_reg = TMP;
3516 intptr_t size_offset = ClassTable::SizeOffsetFor(cid, space == Heap::kNew); 3532 intptr_t size_offset = ClassTable::SizeOffsetFor(cid, space == Heap::kNew);
3517 addq(Address(temp_reg, size_offset), Immediate(size_in_bytes)); 3533 addq(Address(temp_reg, size_offset), Immediate(size_in_bytes));
3518 } 3534 }
3519 3535
3520 3536
3521 void Assembler::TryAllocate(const Class& cls, 3537 void Assembler::TryAllocate(const Class& cls,
3522 Label* failure, 3538 Label* failure,
3523 bool near_jump, 3539 bool near_jump,
3524 Register instance_reg, 3540 Register instance_reg) {
3525 Register pp) {
3526 ASSERT(failure != NULL); 3541 ASSERT(failure != NULL);
3527 if (FLAG_inline_alloc) { 3542 if (FLAG_inline_alloc) {
3528 // If this allocation is traced, program will jump to failure path 3543 // If this allocation is traced, program will jump to failure path
3529 // (i.e. the allocation stub) which will allocate the object and trace the 3544 // (i.e. the allocation stub) which will allocate the object and trace the
3530 // allocation call site. 3545 // allocation call site.
3531 MaybeTraceAllocation(cls.id(), failure, near_jump); 3546 MaybeTraceAllocation(cls.id(), failure, near_jump);
3532 Heap* heap = Isolate::Current()->heap(); 3547 Heap* heap = Isolate::Current()->heap();
3533 const intptr_t instance_size = cls.instance_size(); 3548 const intptr_t instance_size = cls.instance_size();
3534 Heap::Space space = heap->SpaceForAllocation(cls.id()); 3549 Heap::Space space = heap->SpaceForAllocation(cls.id());
3535 LoadImmediate(TMP, Immediate(heap->TopAddress(space)), pp); 3550 LoadImmediate(TMP, Immediate(heap->TopAddress(space)));
3536 movq(instance_reg, Address(TMP, 0)); 3551 movq(instance_reg, Address(TMP, 0));
3537 AddImmediate(instance_reg, Immediate(instance_size), pp); 3552 AddImmediate(instance_reg, Immediate(instance_size));
3538 // instance_reg: potential next object start. 3553 // instance_reg: potential next object start.
3539 LoadImmediate(TMP, Immediate(heap->EndAddress(space)), pp); 3554 LoadImmediate(TMP, Immediate(heap->EndAddress(space)));
3540 cmpq(instance_reg, Address(TMP, 0)); 3555 cmpq(instance_reg, Address(TMP, 0));
3541 j(ABOVE_EQUAL, failure, near_jump); 3556 j(ABOVE_EQUAL, failure, near_jump);
3542 // Successfully allocated the object, now update top to point to 3557 // Successfully allocated the object, now update top to point to
3543 // next object start and store the class in the class field of object. 3558 // next object start and store the class in the class field of object.
3544 LoadImmediate(TMP, Immediate(heap->TopAddress(space)), pp); 3559 LoadImmediate(TMP, Immediate(heap->TopAddress(space)));
3545 movq(Address(TMP, 0), instance_reg); 3560 movq(Address(TMP, 0), instance_reg);
3546 UpdateAllocationStats(cls.id(), space); 3561 UpdateAllocationStats(cls.id(), space);
3547 ASSERT(instance_size >= kHeapObjectTag); 3562 ASSERT(instance_size >= kHeapObjectTag);
3548 AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size), pp); 3563 AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size));
3549 uword tags = 0; 3564 uword tags = 0;
3550 tags = RawObject::SizeTag::update(instance_size, tags); 3565 tags = RawObject::SizeTag::update(instance_size, tags);
3551 ASSERT(cls.id() != kIllegalCid); 3566 ASSERT(cls.id() != kIllegalCid);
3552 tags = RawObject::ClassIdTag::update(cls.id(), tags); 3567 tags = RawObject::ClassIdTag::update(cls.id(), tags);
3553 MoveImmediate(FieldAddress(instance_reg, Object::tags_offset()), 3568 MoveImmediate(FieldAddress(instance_reg, Object::tags_offset()),
3554 Immediate(tags), pp); 3569 Immediate(tags));
3555 } else { 3570 } else {
3556 jmp(failure); 3571 jmp(failure);
3557 } 3572 }
3558 } 3573 }
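The fast path above is a standard bump-pointer allocation; roughly, in pseudocode (a sketch of the emitted sequence, not VM API):

  // start = heap->top(space);
  // next  = start + instance_size;
  // if (next >= heap->end(space)) goto failure;   // fall back to the stub
  // heap->top(space) = next;                       // commit the allocation
  // instance_reg = start + kHeapObjectTag;         // tagged pointer
  // instance_reg->tags = SizeTag(instance_size) | ClassIdTag(cls.id());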
3559 3574
3560 3575
3561 void Assembler::TryAllocateArray(intptr_t cid, 3576 void Assembler::TryAllocateArray(intptr_t cid,
3562 intptr_t instance_size, 3577 intptr_t instance_size,
3563 Label* failure, 3578 Label* failure,
3564 bool near_jump, 3579 bool near_jump,
(...skipping 179 matching lines...)
3744 void Assembler::LoadClassId(Register result, Register object) { 3759 void Assembler::LoadClassId(Register result, Register object) {
3745 ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32); 3760 ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32);
3746 ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32); 3761 ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32);
3747 ASSERT(sizeof(classid_t) == sizeof(uint32_t)); 3762 ASSERT(sizeof(classid_t) == sizeof(uint32_t));
3748 const intptr_t class_id_offset = Object::tags_offset() + 3763 const intptr_t class_id_offset = Object::tags_offset() +
3749 RawObject::kClassIdTagPos / kBitsPerByte; 3764 RawObject::kClassIdTagPos / kBitsPerByte;
3750 movl(result, FieldAddress(object, class_id_offset)); 3765 movl(result, FieldAddress(object, class_id_offset));
3751 } 3766 }
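As a quick check on the offset arithmetic above (the class id occupies the upper 32 bits of the 64-bit tags word):

  // class_id_offset = tags_offset + kClassIdTagPos / kBitsPerByte
  //                 = tags_offset + 32 / 8 = tags_offset + 4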
3752 3767
3753 3768
3754 void Assembler::LoadClassById(Register result, Register class_id, Register pp) { 3769 void Assembler::LoadClassById(Register result, Register class_id) {
3755 ASSERT(result != class_id); 3770 ASSERT(result != class_id);
3756 LoadIsolate(result); 3771 LoadIsolate(result);
3757 const intptr_t offset = 3772 const intptr_t offset =
3758 Isolate::class_table_offset() + ClassTable::table_offset(); 3773 Isolate::class_table_offset() + ClassTable::table_offset();
3759 movq(result, Address(result, offset)); 3774 movq(result, Address(result, offset));
3760 movq(result, Address(result, class_id, TIMES_8, 0)); 3775 movq(result, Address(result, class_id, TIMES_8, 0));
3761 } 3776 }
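The three loads above amount to a class-table lookup; roughly (accessor names assumed for illustration only):

  // result = isolate->class_table()->table()[class_id];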
3762 3777
3763 3778
3764 void Assembler::LoadClass(Register result, Register object, Register pp) { 3779 void Assembler::LoadClass(Register result, Register object) {
3765 LoadClassId(TMP, object); 3780 LoadClassId(TMP, object);
3766 LoadClassById(result, TMP, pp); 3781 LoadClassById(result, TMP);
3767 } 3782 }
3768 3783
3769 3784
3770 void Assembler::CompareClassId(Register object, intptr_t class_id) { 3785 void Assembler::CompareClassId(Register object, intptr_t class_id) {
3771 LoadClassId(TMP, object); 3786 LoadClassId(TMP, object);
3772 cmpl(TMP, Immediate(class_id)); 3787 cmpl(TMP, Immediate(class_id));
3773 } 3788 }
3774 3789
3775 3790
3776 void Assembler::SmiUntagOrCheckClass(Register object, 3791 void Assembler::SmiUntagOrCheckClass(Register object,
(...skipping 14 matching lines...)
3791 movl(TMP, Address(object, TIMES_2, class_id_offset)); 3806 movl(TMP, Address(object, TIMES_2, class_id_offset));
3792 cmpl(TMP, Immediate(class_id)); 3807 cmpl(TMP, Immediate(class_id));
3793 } 3808 }
3794 3809
3795 3810
3796 void Assembler::LoadClassIdMayBeSmi(Register result, Register object) { 3811 void Assembler::LoadClassIdMayBeSmi(Register result, Register object) {
3797 ASSERT(result != object); 3812 ASSERT(result != object);
3798 3813
3799 // Load up a null object. We only need it so we can use LoadClassId on it in 3814 // Load up a null object. We only need it so we can use LoadClassId on it in
3800 // the case that object is a Smi. 3815 // the case that object is a Smi.
3801 LoadObject(result, Object::null_object(), PP); 3816 LoadObject(result, Object::null_object());
3802 // Check if the object is a Smi. 3817 // Check if the object is a Smi.
3803 testq(object, Immediate(kSmiTagMask)); 3818 testq(object, Immediate(kSmiTagMask));
3804 // If the object *is* a Smi, use the null object instead. 3819 // If the object *is* a Smi, use the null object instead.
3805 cmoveq(object, result); 3820 cmoveq(object, result);
3806 // Loads either the cid of the object if it isn't a Smi, or the cid of null 3821 // Loads either the cid of the object if it isn't a Smi, or the cid of null
3807 // if it is a Smi, which will be ignored. 3822 // if it is a Smi, which will be ignored.
3808 LoadClassId(result, object); 3823 LoadClassId(result, object);
3809 3824
3810 movq(object, Immediate(kSmiCid)); 3825 movq(object, Immediate(kSmiCid));
3811 // If object is a Smi, move the Smi cid into result. o/w leave alone. 3826 // If object is a Smi, move the Smi cid into result. o/w leave alone.
(...skipping 119 matching lines...)
3931 3946
3932 3947
3933 const char* Assembler::FpuRegisterName(FpuRegister reg) { 3948 const char* Assembler::FpuRegisterName(FpuRegister reg) {
3934 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); 3949 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters));
3935 return xmm_reg_names[reg]; 3950 return xmm_reg_names[reg];
3936 } 3951 }
3937 3952
3938 } // namespace dart 3953 } // namespace dart
3939 3954
3940 #endif // defined TARGET_ARCH_X64 3955 #endif // defined TARGET_ARCH_X64