OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/heap.h" | 9 #include "vm/heap.h" |
10 #include "vm/memory_region.h" | 10 #include "vm/memory_region.h" |
(...skipping 47 matching lines...)
58 reinterpret_cast<DetectCPUFeatures>(instructions.EntryPoint())(); | 58 reinterpret_cast<DetectCPUFeatures>(instructions.EntryPoint())(); |
59 sse4_1_supported_ = (features & kSSE4_1BitMask) != 0; | 59 sse4_1_supported_ = (features & kSSE4_1BitMask) != 0; |
60 #ifdef DEBUG | 60 #ifdef DEBUG |
61 initialized_ = true; | 61 initialized_ = true; |
62 #endif | 62 #endif |
63 } | 63 } |
64 | 64 |
65 #undef __ | 65 #undef __ |
66 | 66 |
67 | 67 |
| 68 Assembler::Assembler(bool use_far_branches) |
| 69 : buffer_(), |
| 70 object_pool_(GrowableObjectArray::Handle()), |
| 71 patchable_pool_entries_(), |
| 72 prologue_offset_(-1), |
| 73 comments_() { |
| 74 // Far branching mode is only needed and implemented for MIPS and ARM. |
| 75 ASSERT(!use_far_branches); |
| 76 if (Isolate::Current() != Dart::vm_isolate()) { |
| 77 object_pool_ = GrowableObjectArray::New(Heap::kOld); |
| 78 |
| 79 // These objects and labels need to be accessible through every pool-pointer |
| 80 // at the same index. |
| 81 object_pool_.Add(Object::Handle(), Heap::kOld); |
| 82 patchable_pool_entries_.Add(kNotPatchable); |
| 83 |
| 84 object_pool_.Add(Bool::True(), Heap::kOld); |
| 85 patchable_pool_entries_.Add(kNotPatchable); |
| 86 |
| 87 object_pool_.Add(Bool::False(), Heap::kOld); |
| 88 patchable_pool_entries_.Add(kNotPatchable); |
| 89 |
| 90 if (StubCode::UpdateStoreBuffer_entry() != NULL) { |
| 91 FindExternalLabel(&StubCode::UpdateStoreBufferLabel(), kNotPatchable); |
 | 92       // FindExternalLabel above already records the patchability entry. |
| 93 } else { |
| 94 object_pool_.Add(Object::Handle(), Heap::kOld); |
| 95 patchable_pool_entries_.Add(kNotPatchable); |
| 96 } |
| 97 |
| 98 if (StubCode::CallToRuntime_entry() != NULL) { |
| 99 FindExternalLabel(&StubCode::CallToRuntimeLabel(), kNotPatchable); |
 | 100       // FindExternalLabel above already records the patchability entry. |
| 101 } else { |
| 102 object_pool_.Add(Object::Handle(), Heap::kOld); |
| 103 patchable_pool_entries_.Add(kNotPatchable); |
| 104 } |
| 105 } |
| 106 } |
| 107 |
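For context: the constructor seeds every pool built outside the VM isolate with the same fixed prefix, so generated code can rely on these indices no matter which frame's pool pointer is live. Inferred layout (the indices follow from the Add order above; they are not spelled out in the CL):

    // [0] Object::Handle()   null placeholder            (kNotPatchable)
    // [1] Bool::True()                                    (kNotPatchable)
    // [2] Bool::False()                                   (kNotPatchable)
    // [3] UpdateStoreBuffer stub address boxed as a Smi, or a null placeholder
    // [4] CallToRuntime stub address boxed as a Smi, or a null placeholder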
| 108 |
68 void Assembler::InitializeMemoryWithBreakpoints(uword data, int length) { | 109 void Assembler::InitializeMemoryWithBreakpoints(uword data, int length) { |
69 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); | 110 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); |
70 } | 111 } |
71 | 112 |
72 | 113 |
73 void Assembler::call(Register reg) { | 114 void Assembler::call(Register reg) { |
74 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 115 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
75 Operand operand(reg); | 116 Operand operand(reg); |
76 EmitOperandREX(2, operand, REX_NONE); | 117 EmitOperandREX(2, operand, REX_NONE); |
77 EmitUint8(0xFF); | 118 EmitUint8(0xFF); |
(...skipping 10 matching lines...)
88 | 129 |
89 | 130 |
90 void Assembler::call(Label* label) { | 131 void Assembler::call(Label* label) { |
91 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 132 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
92 static const int kSize = 5; | 133 static const int kSize = 5; |
93 EmitUint8(0xE8); | 134 EmitUint8(0xE8); |
94 EmitLabel(label, kSize); | 135 EmitLabel(label, kSize); |
95 } | 136 } |
96 | 137 |
97 | 138 |
| 139 void Assembler::LoadExternalLabel(Register dst, |
| 140 const ExternalLabel* label, |
| 141 Patchability patchable, |
| 142 Register pp) { |
| 143 const int32_t offset = |
| 144 Array::element_offset(FindExternalLabel(label, patchable)); |
| 145 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); |
| 146 } |
| 147 |
| 148 |
98 void Assembler::call(const ExternalLabel* label) { | 149 void Assembler::call(const ExternalLabel* label) { |
99 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 150 { // Encode movq(TMP, Immediate(label->address())), but always as imm64. |
| 151 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 152 EmitRegisterREX(TMP, REX_W); |
| 153 EmitUint8(0xB8 | (TMP & 7)); |
| 154 EmitInt64(label->address()); |
| 155 } |
| 156 call(TMP); |
| 157 } |
| 158 |
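A note on the hand-emitted bytes above: REX.W followed by 0xB8|rd is the movabs form, the only mov encoding that carries a full 8-byte immediate, so the sequence has a known size regardless of the address value; going through movq(TMP, Immediate(...)) could legally pick a shorter encoding for small addresses. A hedged sketch of the emitted sequence, assuming TMP is R11 (the actual assignment lives in constants_x64.h, outside this hunk):

    49 BB <imm64>   // movq r11, imm64: REX.W|REX.B, 0xB8 | (11 & 7); 10 bytes
    41 FF D3        // call r11: REX.B, FF /2; 3 bytes, 13 bytes in total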
| 159 |
| 160 void Assembler::CallPatchable(const ExternalLabel* label) { |
100 intptr_t call_start = buffer_.GetPosition(); | 161 intptr_t call_start = buffer_.GetPosition(); |
| 162 LoadExternalLabel(TMP, label, kPatchable, PP); |
| 163 call(TMP); |
| 164 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); |
| 165 } |
101 | 166 |
102 // Encode movq(TMP, Immediate(label->address())), but always as imm64. | |
103 EmitRegisterREX(TMP, REX_W); | |
104 EmitUint8(0xB8 | (TMP & 7)); | |
105 EmitInt64(label->address()); | |
106 | 167 |
107 // Encode call(TMP). | 168 void Assembler::Call(const ExternalLabel* label, Register pp) { |
108 Operand operand(TMP); | 169 if (Isolate::Current() == Dart::vm_isolate()) { |
109 EmitOperandREX(2, operand, REX_NONE); | 170 call(label); |
110 EmitUint8(0xFF); | 171 } else { |
111 EmitOperand(2, operand); | 172 LoadExternalLabel(TMP, label, kNotPatchable, pp); |
112 | 173 call(TMP); |
113 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); | 174 } |
114 } | 175 } |
115 | 176 |
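CallPatchable must emit a fixed-size sequence because patching happens through the pool: each kPatchable lookup allocates a private pool entry (see FindExternalLabel below), the patcher stores the new target into that entry, and the fixed size lets it locate the load and its pool index at a known offset from the return address. A hedged sketch of the sequence, assuming TMP == R11 and PP == R15:

    4D 8B 9F <disp32>   // movq r11, [r15 + disp32]; 7 bytes, disp32 forced
                        // by AddressBaseImm32 in LoadWordFromPoolOffset
    41 FF D3            // call r11; 3 bytes

Call, by contrast, falls back to the movabs form inside the VM isolate, where stub code has no object pool to load from.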
116 | 177 |
117 void Assembler::pushq(Register reg) { | 178 void Assembler::pushq(Register reg) { |
118 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 179 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
119 EmitRegisterREX(reg, REX_NONE); | 180 EmitRegisterREX(reg, REX_NONE); |
120 EmitUint8(0x50 | (reg & 7)); | 181 EmitUint8(0x50 | (reg & 7)); |
121 } | 182 } |
122 | 183 |
123 | 184 |
(...skipping 1829 matching lines...)
1953 | 2014 |
1954 | 2015 |
1955 void Assembler::j(Condition condition, const ExternalLabel* label) { | 2016 void Assembler::j(Condition condition, const ExternalLabel* label) { |
1956 Label no_jump; | 2017 Label no_jump; |
1957 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition. | 2018 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition. |
1958 jmp(label); | 2019 jmp(label); |
1959 Bind(&no_jump); | 2020 Bind(&no_jump); |
1960 } | 2021 } |
1961 | 2022 |
1962 | 2023 |
| 2024 void Assembler::J(Condition condition, const ExternalLabel* label, |
| 2025 Register pp) { |
| 2026 Label no_jump; |
| 2027 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition. |
| 2028 Jmp(label, pp); |
| 2029 Bind(&no_jump); |
| 2030 } |
| 2031 |
| 2032 |
1963 void Assembler::jmp(Register reg) { | 2033 void Assembler::jmp(Register reg) { |
1964 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2034 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
1965 Operand operand(reg); | 2035 Operand operand(reg); |
1966 EmitOperandREX(4, operand, REX_NONE); | 2036 EmitOperandREX(4, operand, REX_NONE); |
1967 EmitUint8(0xFF); | 2037 EmitUint8(0xFF); |
1968 EmitOperand(4, operand); | 2038 EmitOperand(4, operand); |
1969 } | 2039 } |
1970 | 2040 |
1971 | 2041 |
1972 void Assembler::jmp(Label* label, bool near) { | 2042 void Assembler::jmp(Label* label, bool near) { |
(...skipping 14 matching lines...)
1987 EmitUint8(0xEB); | 2057 EmitUint8(0xEB); |
1988 EmitNearLabelLink(label); | 2058 EmitNearLabelLink(label); |
1989 } else { | 2059 } else { |
1990 EmitUint8(0xE9); | 2060 EmitUint8(0xE9); |
1991 EmitLabelLink(label); | 2061 EmitLabelLink(label); |
1992 } | 2062 } |
1993 } | 2063 } |
1994 | 2064 |
1995 | 2065 |
1996 void Assembler::jmp(const ExternalLabel* label) { | 2066 void Assembler::jmp(const ExternalLabel* label) { |
1997 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2067 { // Encode movq(TMP, Immediate(label->address())), but always as imm64. |
| 2068 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 2069 EmitRegisterREX(TMP, REX_W); |
| 2070 EmitUint8(0xB8 | (TMP & 7)); |
| 2071 EmitInt64(label->address()); |
| 2072 } |
| 2073 jmp(TMP); |
| 2074 } |
| 2075 |
| 2076 |
| 2077 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { |
1998 intptr_t call_start = buffer_.GetPosition(); | 2078 intptr_t call_start = buffer_.GetPosition(); |
| 2079 LoadExternalLabel(TMP, label, kPatchable, pp); |
| 2080 jmp(TMP); |
| 2081 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); |
| 2082 } |
1999 | 2083 |
2000 // Encode movq(TMP, Immediate(label->address())), but always as imm64. | |
2001 EmitRegisterREX(TMP, REX_W); | |
2002 EmitUint8(0xB8 | (TMP & 7)); | |
2003 EmitInt64(label->address()); | |
2004 | 2084 |
2005 // Encode jmp(TMP). | 2085 void Assembler::Jmp(const ExternalLabel* label, Register pp) { |
2006 Operand operand(TMP); | 2086 LoadExternalLabel(TMP, label, kNotPatchable, pp); |
2007 EmitOperandREX(4, operand, REX_NONE); | 2087 jmp(TMP); |
2008 EmitUint8(0xFF); | |
2009 EmitOperand(4, operand); | |
2010 | |
2011 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); | |
2012 } | 2088 } |
2013 | 2089 |
2014 | 2090 |
2015 void Assembler::lock() { | 2091 void Assembler::lock() { |
2016 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 2092 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
2017 EmitUint8(0xF0); | 2093 EmitUint8(0xF0); |
2018 } | 2094 } |
2019 | 2095 |
2020 | 2096 |
2021 void Assembler::cmpxchgl(const Address& address, Register reg) { | 2097 void Assembler::cmpxchgl(const Address& address, Register reg) { |
(...skipping 62 matching lines...)
2084 if (stack_elements <= 4) { | 2160 if (stack_elements <= 4) { |
2085 for (intptr_t i = 0; i < stack_elements; i++) { | 2161 for (intptr_t i = 0; i < stack_elements; i++) { |
2086 popq(TMP); | 2162 popq(TMP); |
2087 } | 2163 } |
2088 return; | 2164 return; |
2089 } | 2165 } |
2090 addq(RSP, Immediate(stack_elements * kWordSize)); | 2166 addq(RSP, Immediate(stack_elements * kWordSize)); |
2091 } | 2167 } |
2092 | 2168 |
2093 | 2169 |
2094 void Assembler::LoadObject(Register dst, const Object& object) { | 2170 intptr_t Assembler::FindObject(const Object& obj, Patchability patchable) { |
2095 if (object.IsSmi() || object.InVMHeap()) { | 2171 // The object pool cannot be used in the vm isolate. |
| 2172 ASSERT(Isolate::Current() != Dart::vm_isolate()); |
| 2173 ASSERT(!object_pool_.IsNull()); |
| 2174 |
| 2175 // TODO(zra): This can be slow. Add a hash map from obj.raw() to |
| 2176 // object pool indexes to speed lookup. |
| 2177 for (int i = 0; i < object_pool_.Length(); i++) { |
| 2178 if ((object_pool_.At(i) == obj.raw()) && |
| 2179 (patchable_pool_entries_[i] != kPatchable)) { |
| 2180 return i; |
| 2181 } |
| 2182 } |
| 2183 object_pool_.Add(obj, Heap::kOld); |
| 2184 patchable_pool_entries_.Add(patchable); |
| 2185 return object_pool_.Length() - 1; |
| 2186 } |
| 2187 |
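The search above skips entries marked kPatchable, so equal objects share a slot only in the non-patchable case. A hypothetical usage sketch:

    intptr_t a = FindObject(obj, kNotPatchable);  // appends obj, returns its index
    intptr_t b = FindObject(obj, kNotPatchable);  // finds the same slot: b == a

Patchable entries are only created via FindExternalLabel below, which appends unconditionally, so every patchable call site owns a slot that can be patched without disturbing other sites.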
| 2188 |
| 2189 intptr_t Assembler::FindExternalLabel(const ExternalLabel* label, |
| 2190 Patchability patchable) { |
| 2191 // The object pool cannot be used in the vm isolate. |
| 2192 ASSERT(Isolate::Current() != Dart::vm_isolate()); |
| 2193 ASSERT(!object_pool_.IsNull()); |
| 2194 const uword address = label->address(); |
| 2195 ASSERT(Utils::IsAligned(address, 4)); |
| 2196 // The address is stored in the object array as a RawSmi. |
| 2197 const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address)); |
| 2198 if (patchable == kNotPatchable) { |
| 2199 return FindObject(smi, kNotPatchable); |
| 2200 } |
| 2201 // If the call is patchable, do not reuse an existing entry since each |
| 2202 // reference may be patched independently. |
| 2203 object_pool_.Add(smi, Heap::kOld); |
| 2204 patchable_pool_entries_.Add(patchable); |
| 2205 return object_pool_.Length() - 1; |
| 2206 } |
| 2207 |
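Why the reinterpret_cast is safe: a Smi is any word whose low bit is clear (kSmiTag == 0, kSmiTagMask == 1), and a 4-byte-aligned address has both low bits clear, so the bit pattern is a valid Smi and the GC treats the pool entry as a boxed integer rather than a heap pointer. The invariant, spelled out:

    const uword address = label->address();
    ASSERT(Utils::IsAligned(address, 4));        // low two bits are zero
    ASSERT((address & kSmiTagMask) == kSmiTag);  // hence a valid Smi pattern
    const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));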
| 2208 |
| 2209 bool Assembler::CanLoadFromObjectPool(const Object& object) { |
| 2210 return !object.IsSmi() && // Not a Smi |
| 2211 // Not in the VMHeap, OR is one of the VMHeap objects we put in every |
| 2212 // object pool. |
| 2213 (!object.InVMHeap() || (object.raw() == Object::null()) || |
| 2214 (object.raw() == Bool::True().raw()) || |
| 2215 (object.raw() == Bool::False().raw())) && |
| 2216 object.IsNotTemporaryScopedHandle() && |
| 2217 object.IsOld(); |
| 2218 } |
| 2219 |
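The three VM-heap exceptions are exactly the entries the constructor seeds into every pool, which is presumably why they are let through: they are guaranteed to sit at the same index everywhere. Other VM-heap objects never move, so embedding them as immediates is safe and needs no pool. Examples of the predicate, inferred from the code above:

    // Smi                                -> false (encoded as an immediate)
    // null, Bool::True(), Bool::False()  -> true  (seeded at fixed indices)
    // any other VM-heap object           -> false (immovable; immediate is fine)
    // ordinary old-space object          -> true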
| 2220 |
| 2221 void Assembler::LoadWordFromPoolOffset(Register dst, Register pp, |
| 2222 int32_t offset) { |
| 2223 // This sequence must be of fixed size. AddressBaseImm32 |
| 2224 // forces the address operand to use a fixed-size imm32 encoding. |
| 2225 movq(dst, Address::AddressBaseImm32(pp, offset)); |
| 2226 } |
| 2227 |
| 2228 |
| 2229 void Assembler::LoadObject(Register dst, const Object& object, Register pp) { |
| 2230 if (CanLoadFromObjectPool(object)) { |
| 2231 const int32_t offset = |
| 2232 Array::element_offset(FindObject(object, kNotPatchable)); |
| 2233 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); |
| 2234 } else { |
2096 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); | 2235 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
2097 } else { | |
2098 ASSERT(object.IsNotTemporaryScopedHandle()); | |
2099 ASSERT(object.IsOld()); | |
2100 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | |
2101 EmitRegisterREX(dst, REX_W); | |
2102 EmitUint8(0xB8 | (dst & 7)); | |
2103 buffer_.EmitObject(object); | |
2104 } | 2236 } |
2105 } | 2237 } |
2106 | 2238 |
2107 | 2239 |
2108 void Assembler::StoreObject(const Address& dst, const Object& object) { | 2240 void Assembler::StoreObject(const Address& dst, const Object& object) { |
2109 if (object.IsSmi() || object.InVMHeap()) { | 2241 if (CanLoadFromObjectPool(object)) { |
| 2242 LoadObject(TMP, object, PP); |
| 2243 movq(dst, TMP); |
| 2244 } else { |
2110 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); | 2245 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
2111 } else { | |
2112 ASSERT(object.IsNotTemporaryScopedHandle()); | |
2113 ASSERT(object.IsOld()); | |
2114 LoadObject(TMP, object); | |
2115 movq(dst, TMP); | |
2116 } | 2246 } |
2117 } | 2247 } |
2118 | 2248 |
2119 | 2249 |
2120 void Assembler::PushObject(const Object& object) { | 2250 void Assembler::PushObject(const Object& object) { |
2121 if (object.IsSmi() || object.InVMHeap()) { | 2251 if (CanLoadFromObjectPool(object)) { |
| 2252 LoadObject(TMP, object, PP); |
| 2253 pushq(TMP); |
| 2254 } else { |
2122 pushq(Immediate(reinterpret_cast<int64_t>(object.raw()))); | 2255 pushq(Immediate(reinterpret_cast<int64_t>(object.raw()))); |
2123 } else { | |
2124 LoadObject(TMP, object); | |
2125 pushq(TMP); | |
2126 } | 2256 } |
2127 } | 2257 } |
2128 | 2258 |
2129 | 2259 |
2130 void Assembler::CompareObject(Register reg, const Object& object) { | 2260 void Assembler::CompareObject(Register reg, const Object& object) { |
2131 if (object.IsSmi() || object.InVMHeap()) { | 2261 if (CanLoadFromObjectPool(object)) { |
| 2262 ASSERT(reg != TMP); |
| 2263 LoadObject(TMP, object, PP); |
| 2264 cmpq(reg, TMP); |
| 2265 } else { |
2132 cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw()))); | 2266 cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw()))); |
2133 } else { | |
2134 ASSERT(reg != TMP); | |
2135 LoadObject(TMP, object); | |
2136 cmpq(reg, TMP); | |
2137 } | 2267 } |
2138 } | 2268 } |
2139 | 2269 |
2140 | 2270 |
2141 // Destroys the value register. | 2271 // Destroys the value register. |
2142 void Assembler::StoreIntoObjectFilterNoSmi(Register object, | 2272 void Assembler::StoreIntoObjectFilterNoSmi(Register object, |
2143 Register value, | 2273 Register value, |
2144 Label* no_update) { | 2274 Label* no_update) { |
2145 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && | 2275 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && |
2146 (kOldObjectAlignmentOffset == 0), young_alignment); | 2276 (kOldObjectAlignmentOffset == 0), young_alignment); |
(...skipping 42 matching lines...)
2189 if (can_value_be_smi) { | 2319 if (can_value_be_smi) { |
2190 StoreIntoObjectFilter(object, value, &done); | 2320 StoreIntoObjectFilter(object, value, &done); |
2191 } else { | 2321 } else { |
2192 StoreIntoObjectFilterNoSmi(object, value, &done); | 2322 StoreIntoObjectFilterNoSmi(object, value, &done); |
2193 } | 2323 } |
2194 // A store buffer update is required. | 2324 // A store buffer update is required. |
2195 if (value != RAX) pushq(RAX); | 2325 if (value != RAX) pushq(RAX); |
2196 if (object != RAX) { | 2326 if (object != RAX) { |
2197 movq(RAX, object); | 2327 movq(RAX, object); |
2198 } | 2328 } |
2199 call(&StubCode::UpdateStoreBufferLabel()); | 2329 Call(&StubCode::UpdateStoreBufferLabel(), PP); |
2200 if (value != RAX) popq(RAX); | 2330 if (value != RAX) popq(RAX); |
2201 Bind(&done); | 2331 Bind(&done); |
2202 } | 2332 } |
2203 | 2333 |
2204 | 2334 |
2205 void Assembler::StoreIntoObjectNoBarrier(Register object, | 2335 void Assembler::StoreIntoObjectNoBarrier(Register object, |
2206 const Address& dest, | 2336 const Address& dest, |
2207 Register value) { | 2337 Register value) { |
2208 movq(dest, value); | 2338 movq(dest, value); |
2209 #if defined(DEBUG) | 2339 #if defined(DEBUG) |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2291 } | 2421 } |
2292 } | 2422 } |
2293 | 2423 |
2294 | 2424 |
2295 void Assembler::LeaveFrame() { | 2425 void Assembler::LeaveFrame() { |
2296 movq(RSP, RBP); | 2426 movq(RSP, RBP); |
2297 popq(RBP); | 2427 popq(RBP); |
2298 } | 2428 } |
2299 | 2429 |
2300 | 2430 |
| 2431 void Assembler::LeaveFrameWithPP() { |
| 2432 movq(PP, Address(RBP, -2 * kWordSize)); |
| 2433 LeaveFrame(); |
| 2434 } |
| 2435 |
| 2436 |
| 2437 void Assembler::ReturnPatchable() { |
| 2438 // This sequence must have a fixed size so that it can be patched by the |
| 2439 // debugger. |
| 2440 intptr_t start = buffer_.GetPosition(); |
| 2441 LeaveFrameWithPP(); |
| 2442 ret(); |
| 2443 nop(4); |
| 2444 ASSERT((buffer_.GetPosition() - start) == 13); |
| 2445 } |
| 2446 |
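The 13 in the ASSERT can be accounted for byte by byte; a hedged sketch, assuming PP == R15:

    4C 8B 7D F0    // movq r15, [rbp - 16]  (LeaveFrameWithPP)  4 bytes
    48 89 EC       // movq rsp, rbp         (LeaveFrame)        3 bytes
    5D             // popq rbp                                  1 byte
    C3             // ret                                       1 byte
    0F 1F 40 00    // 4-byte nop                                4 bytes

4 + 3 + 1 + 1 + 4 == 13, so the sequence stays patchable as long as none of these encodings changes.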
| 2447 |
2301 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { | 2448 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { |
2302 // Reserve space for arguments and align frame before entering | 2449 // Reserve space for arguments and align frame before entering |
2303 // the C++ world. | 2450 // the C++ world. |
2304 AddImmediate(RSP, Immediate(-frame_space)); | 2451 AddImmediate(RSP, Immediate(-frame_space)); |
2305 if (OS::ActivationFrameAlignment() > 1) { | 2452 if (OS::ActivationFrameAlignment() > 1) { |
2306 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); | 2453 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); |
2307 } | 2454 } |
2308 } | 2455 } |
2309 | 2456 |
2310 | 2457 |
(...skipping 60 matching lines...)
2371 leave(); | 2518 leave(); |
2372 } | 2519 } |
2373 | 2520 |
2374 | 2521 |
2375 void Assembler::CallRuntime(const RuntimeEntry& entry, | 2522 void Assembler::CallRuntime(const RuntimeEntry& entry, |
2376 intptr_t argument_count) { | 2523 intptr_t argument_count) { |
2377 entry.Call(this, argument_count); | 2524 entry.Call(this, argument_count); |
2378 } | 2525 } |
2379 | 2526 |
2380 | 2527 |
| 2528 void Assembler::LoadPoolPointer(Register pp) { |
| 2529 Label next; |
| 2530 call(&next); |
| 2531 Bind(&next); |
| 2532 |
| 2533 // Load new pool pointer. |
| 2534 const intptr_t object_pool_pc_dist = |
| 2535 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 2536 CodeSize(); |
| 2537 popq(pp); |
| 2538 movq(pp, Address(pp, -object_pool_pc_dist)); |
| 2539 } |
| 2540 |
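The call/pop pair is the standard x64 idiom for materializing the current PC, and the pool then sits at a fixed negative distance because the code and its object pool field live in the same Instructions object. The arithmetic, spelled out:

    popped pc  == EntryPoint + CodeSize()             (the return address)
    EntryPoint == instructions_start + Instructions::HeaderSize()
    pool field == instructions_start + Instructions::object_pool_offset()
    pc - (HeaderSize() - object_pool_offset() + CodeSize())
               == instructions_start + object_pool_offset()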
| 2541 |
2381 void Assembler::EnterDartFrame(intptr_t frame_size) { | 2542 void Assembler::EnterDartFrame(intptr_t frame_size) { |
2382 EnterFrame(0); | 2543 EnterFrame(0); |
| 2544 |
2383 Label dart_entry; | 2545 Label dart_entry; |
2384 call(&dart_entry); | 2546 call(&dart_entry); |
2385 Bind(&dart_entry); | 2547 Bind(&dart_entry); |
2386 // The runtime system assumes that the code marker address is | 2548 // The runtime system assumes that the code marker address is |
2387 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code | 2549 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code |
2388 // generated before entering the frame, the address needs to be adjusted. | 2550 // generated before entering the frame, the address needs to be adjusted. |
| 2551 const intptr_t object_pool_pc_dist = |
| 2552 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 2553 CodeSize(); |
2389 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); | 2554 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); |
2390 if (offset != 0) { | 2555 if (offset != 0) { |
2391 addq(Address(RSP, 0), Immediate(offset)); | 2556 addq(Address(RSP, 0), Immediate(offset)); |
2392 } | 2557 } |
 | 2558 // Save caller's pool pointer. |
| 2559 pushq(PP); |
| 2560 |
| 2561 // Load callee's pool pointer. |
| 2562 movq(PP, Address(RSP, 1 * kWordSize)); |
| 2563 movq(PP, Address(PP, -object_pool_pc_dist - offset)); |
| 2564 |
| 2565 if (frame_size != 0) { |
| 2566 subq(RSP, Immediate(frame_size)); |
| 2567 } |
| 2568 } |
| 2569 |
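After this prologue the frame has a fixed shape that the rest of the CL depends on; in particular LeaveFrameWithPP reads the saved pool pointer back from RBP - 16. A sketch:

    [RBP + 8]    caller's return address
    [RBP + 0]    saved caller RBP
    [RBP - 8]    PC marker (the adjusted return address of the bound call)
    [RBP - 16]   caller's PP, restored by LeaveFrameWithPP
    [RBP - 16 - frame_size]   <- RSP after the subq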
| 2570 |
| 2571 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size, |
| 2572 Register new_pp, Register new_pc) { |
| 2573 if (new_pc == kNoRegister) { |
| 2574 EnterDartFrame(0); |
| 2575 } else { |
| 2576 EnterFrame(0); |
| 2577 pushq(new_pc); |
| 2578 pushq(PP); |
| 2579 movq(PP, new_pp); |
| 2580 } |
2393 if (frame_size != 0) { | 2581 if (frame_size != 0) { |
2394 subq(RSP, Immediate(frame_size)); | 2582 subq(RSP, Immediate(frame_size)); |
2395 } | 2583 } |
2396 } | 2584 } |
2397 | 2585 |
2398 | 2586 |
2399 // On entry to a function compiled for OSR, the caller's frame pointer, the | 2587 // On entry to a function compiled for OSR, the caller's frame pointer, the |
2400 // stack locals, and any copied parameters are already in place. The frame | 2588 // stack locals, and any copied parameters are already in place. The frame |
2401 // pointer is already set up. The PC marker is not correct for the | 2589 // pointer is already set up. The PC marker is not correct for the |
2402 // optimized function and there may be extra space for spill slots to | 2590 // optimized function and there may be extra space for spill slots to |
2403 // allocate. | 2591 // allocate. |
2404 void Assembler::EnterOsrFrame(intptr_t extra_size) { | 2592 void Assembler::EnterOsrFrame(intptr_t extra_size, |
2405 Label dart_entry; | 2593 Register new_pp, Register new_pc) { |
2406 call(&dart_entry); | 2594 if (new_pc == kNoRegister) { |
2407 Bind(&dart_entry); | 2595 Label dart_entry; |
2408 // The runtime system assumes that the code marker address is | 2596 call(&dart_entry); |
2409 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no | 2597 Bind(&dart_entry); |
2410 // code to set up the frame pointer, the address needs to be adjusted. | 2598 // The runtime system assumes that the code marker address is |
2411 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); | 2599 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no |
2412 if (offset != 0) { | 2600 // code to set up the frame pointer, the address needs to be adjusted. |
2413 addq(Address(RSP, 0), Immediate(offset)); | 2601 const intptr_t object_pool_pc_dist = |
| 2602 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 2603 CodeSize(); |
| 2604 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); |
| 2605 if (offset != 0) { |
| 2606 addq(Address(RSP, 0), Immediate(offset)); |
| 2607 } |
| 2608 |
| 2609 // Load callee's pool pointer. |
| 2610 movq(PP, Address(RSP, 0)); |
| 2611 movq(PP, Address(PP, -object_pool_pc_dist - offset)); |
| 2612 |
| 2613 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize)); |
| 2614 } else { |
| 2615 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), new_pc); |
| 2616 movq(PP, new_pp); |
2414 } | 2617 } |
2415 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize)); | |
2416 if (extra_size != 0) { | 2618 if (extra_size != 0) { |
2417 subq(RSP, Immediate(extra_size)); | 2619 subq(RSP, Immediate(extra_size)); |
2418 } | 2620 } |
2419 } | 2621 } |
2420 | 2622 |
2421 | 2623 |
2422 void Assembler::EnterStubFrame() { | 2624 void Assembler::EnterStubFrame() { |
2423 EnterFrame(0); | 2625 EnterFrame(0); |
2424 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. | 2626 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. |
2425 } | 2627 } |
2426 | 2628 |
2427 | 2629 |
| 2630 void Assembler::EnterStubFrameWithPP() { |
| 2631 EnterFrame(0); |
| 2632 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. |
 | 2633 pushq(PP); // Save caller's pool pointer. |
| 2634 LoadPoolPointer(PP); |
| 2635 } |
| 2636 |
| 2637 |
2428 void Assembler::TryAllocate(const Class& cls, | 2638 void Assembler::TryAllocate(const Class& cls, |
2429 Label* failure, | 2639 Label* failure, |
2430 bool near_jump, | 2640 bool near_jump, |
2431 Register instance_reg) { | 2641 Register instance_reg) { |
2432 ASSERT(failure != NULL); | 2642 ASSERT(failure != NULL); |
2433 if (FLAG_inline_alloc) { | 2643 if (FLAG_inline_alloc) { |
2434 Heap* heap = Isolate::Current()->heap(); | 2644 Heap* heap = Isolate::Current()->heap(); |
2435 const intptr_t instance_size = cls.instance_size(); | 2645 const intptr_t instance_size = cls.instance_size(); |
2436 movq(TMP, Immediate(heap->TopAddress())); | 2646 movq(TMP, Immediate(heap->TopAddress())); |
2437 movq(instance_reg, Address(TMP, 0)); | 2647 movq(instance_reg, Address(TMP, 0)); |
(...skipping 202 matching lines...)
2640 "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", | 2850 "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", |
2641 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15" | 2851 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15" |
2642 }; | 2852 }; |
2643 | 2853 |
2644 | 2854 |
2645 const char* Assembler::FpuRegisterName(FpuRegister reg) { | 2855 const char* Assembler::FpuRegisterName(FpuRegister reg) { |
2646 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); | 2856 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); |
2647 return xmm_reg_names[reg]; | 2857 return xmm_reg_names[reg]; |
2648 } | 2858 } |
2649 | 2859 |
2650 | |
2651 } // namespace dart | 2860 } // namespace dart |
2652 | 2861 |
2653 #endif // defined TARGET_ARCH_X64 | 2862 #endif // defined TARGET_ARCH_X64 |