Chromium Code Reviews

Side by Side Diff: runtime/vm/assembler_x64.cc

Issue 22825023: Uses an object pool on x64 (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 3 months ago
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" 5 #include "vm/globals.h"
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/heap.h" 9 #include "vm/heap.h"
10 #include "vm/memory_region.h" 10 #include "vm/memory_region.h"
(...skipping 47 matching lines...)
58 reinterpret_cast<DetectCPUFeatures>(instructions.EntryPoint())(); 58 reinterpret_cast<DetectCPUFeatures>(instructions.EntryPoint())();
59 sse4_1_supported_ = (features & kSSE4_1BitMask) != 0; 59 sse4_1_supported_ = (features & kSSE4_1BitMask) != 0;
60 #ifdef DEBUG 60 #ifdef DEBUG
61 initialized_ = true; 61 initialized_ = true;
62 #endif 62 #endif
63 } 63 }
64 64
65 #undef __ 65 #undef __
66 66
67 67
68 Assembler::Assembler(bool use_far_branches)
69 : buffer_(),
70 object_pool_(GrowableObjectArray::Handle()),
71 prologue_offset_(-1),
72 comments_() {
73 // Far branching mode is only needed and implemented for MIPS and ARM.
74 ASSERT(!use_far_branches);
75 if (Isolate::Current() != Dart::vm_isolate()) {
76 object_pool_ = GrowableObjectArray::New(Heap::kOld);
77
78 // These objects and labels need to be accessible through every pool-pointer
79 // at the same index.
80 object_pool_.Add(Object::Handle(), Heap::kOld);
81 object_pool_.Add(Bool::True(), Heap::kOld);
82 object_pool_.Add(Bool::False(), Heap::kOld);
83
84 if (StubCode::UpdateStoreBuffer_entry() != NULL) {
85 FindExternalLabel(&StubCode::UpdateStoreBufferLabel(), kNotPatchable);
86 } else {
87 object_pool_.Add(Object::Handle(), Heap::kOld);
88 }
89
90 if (StubCode::CallToRuntime_entry() != NULL) {
91 FindExternalLabel(&StubCode::CallToRuntimeLabel(), kNotPatchable);
92 } else {
93 object_pool_.Add(Object::Handle(), Heap::kOld);
94 }
95 }
96 }
97
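For context, the constructor above seeds every non-VM-isolate object pool with the same entries, so generated code can rely on fixed indices no matter which pool PP points at. A sketch of the resulting layout (the enum names are illustrative, not identifiers from this patch):

    // Pool slots created by the Assembler constructor, in order of the Add() calls above.
    enum ObjectPoolFixedIndex {
      kNullSlot              = 0,  // Object::Handle() placeholder (null)
      kTrueSlot              = 1,  // Bool::True()
      kFalseSlot             = 2,  // Bool::False()
      kUpdateStoreBufferSlot = 3,  // StubCode::UpdateStoreBufferLabel(), or a null
                                   // placeholder if the stub is not generated yet
      kCallToRuntimeSlot     = 4   // StubCode::CallToRuntimeLabel(), or a null placeholder
    };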
98
68 void Assembler::InitializeMemoryWithBreakpoints(uword data, int length) { 99 void Assembler::InitializeMemoryWithBreakpoints(uword data, int length) {
69 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length); 100 memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length);
70 } 101 }
71 102
72 103
73 void Assembler::call(Register reg) { 104 void Assembler::call(Register reg) {
74 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 105 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
75 Operand operand(reg); 106 Operand operand(reg);
76 EmitOperandREX(2, operand, REX_NONE); 107 EmitOperandREX(2, operand, REX_NONE);
77 EmitUint8(0xFF); 108 EmitUint8(0xFF);
(...skipping 10 matching lines...)
88 119
89 120
90 void Assembler::call(Label* label) { 121 void Assembler::call(Label* label) {
91 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 122 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
92 static const int kSize = 5; 123 static const int kSize = 5;
93 EmitUint8(0xE8); 124 EmitUint8(0xE8);
94 EmitLabel(label, kSize); 125 EmitLabel(label, kSize);
95 } 126 }
96 127
97 128
129 void Assembler::LoadExternalLabel(Register dst,
130 const ExternalLabel* label,
131 Patchability patchable,
132 Register pp) {
133 const int32_t offset =
134 Array::element_offset(FindExternalLabel(label, patchable));
135 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
136 }
137
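LoadExternalLabel above turns a pool index into a PP-relative memory operand. PP holds the tagged pointer to the pool array, hence the -kHeapObjectTag correction. A worked example with assumed values (kWordSize == 8 and kHeapObjectTag == 1 on x64; the data_offset value is hypothetical):

    // Array::element_offset(i) == Array::data_offset() + i * kWordSize
    // For pool index 4 and an assumed Array::data_offset() of 24:
    //   offset = 24 + 4 * 8 - 1 = 55
    //   LoadWordFromPoolOffset(dst, pp, 55)  =>  movq dst, [pp + 55]  (fixed-size encoding)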
138
98 void Assembler::call(const ExternalLabel* label) { 139 void Assembler::call(const ExternalLabel* label) {
99 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 140 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
100 intptr_t call_start = buffer_.GetPosition();
101 141
102 // Encode movq(TMP, Immediate(label->address())), but always as imm64. 142 // Encode movq(TMP, Immediate(label->address())), but always as imm64.
103 EmitRegisterREX(TMP, REX_W); 143 EmitRegisterREX(TMP, REX_W);
104 EmitUint8(0xB8 | (TMP & 7)); 144 EmitUint8(0xB8 | (TMP & 7));
105 EmitInt64(label->address()); 145 EmitInt64(label->address());
106 146
107 // Encode call(TMP). 147 // Encode call(TMP).
Florian Schneider 2013/09/06 09:58:15 This should just be: __ call(TMP)
zra 2013/09/06 17:53:26 Done.
108 Operand operand(TMP); 148 Operand operand(TMP);
109 EmitOperandREX(2, operand, REX_NONE); 149 EmitOperandREX(2, operand, REX_NONE);
110 EmitUint8(0xFF); 150 EmitUint8(0xFF);
111 EmitOperand(2, operand); 151 EmitOperand(2, operand);
152 }
112 153
154
155 void Assembler::CallPatchable(const ExternalLabel* label) {
156 intptr_t call_start = buffer_.GetPosition();
157 LoadExternalLabel(TMP, label, kPatchable, PP);
158 {
Florian Schneider 2013/09/06 09:58:15 call(TMP)
zra 2013/09/06 17:53:26 Done.
159 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
160 // Encode call(TMP).
161 Operand operand(TMP);
162 EmitOperandREX(2, operand, REX_NONE);
163 EmitUint8(0xFF);
164 EmitOperand(2, operand);
165 }
113 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); 166 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
114 } 167 }
115 168
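Per the review comments, the hand-rolled 0xFF /2 sequences can simply reuse the existing call(Register) overload. A sketch of CallPatchable after applying that suggestion (the committed code may differ in detail):

    void Assembler::CallPatchable(const ExternalLabel* label) {
      intptr_t call_start = buffer_.GetPosition();
      LoadExternalLabel(TMP, label, kPatchable, PP);
      call(TMP);  // call(Register) already emits the REX prefix, 0xFF, and the ModRM byte.
      ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
    }

The same refactoring applies to Call, Jmp, and JmpPatchable below.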
116 169
170 void Assembler::Call(const ExternalLabel* label, Register pp) {
171 if (Isolate::Current() == Dart::vm_isolate()) {
172 call(label);
173 } else {
174 LoadExternalLabel(TMP, label, kNotPatchable, pp);
175 {
Florian Schneider 2013/09/06 09:58:15 call(TMP)
zra 2013/09/06 17:53:26 Done.
176 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
177 // Encode call(TMP).
178 Operand operand(TMP);
179 EmitOperandREX(2, operand, REX_NONE);
180 EmitUint8(0xFF);
181 EmitOperand(2, operand);
182 }
183 }
184 }
185
186
117 void Assembler::pushq(Register reg) { 187 void Assembler::pushq(Register reg) {
118 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 188 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
119 EmitRegisterREX(reg, REX_NONE); 189 EmitRegisterREX(reg, REX_NONE);
120 EmitUint8(0x50 | (reg & 7)); 190 EmitUint8(0x50 | (reg & 7));
121 } 191 }
122 192
123 193
124 void Assembler::pushq(const Address& address) { 194 void Assembler::pushq(const Address& address) {
125 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 195 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
126 EmitOperandREX(6, address, REX_NONE); 196 EmitOperandREX(6, address, REX_NONE);
(...skipping 1826 matching lines...)
1953 2023
1954 2024
1955 void Assembler::j(Condition condition, const ExternalLabel* label) { 2025 void Assembler::j(Condition condition, const ExternalLabel* label) {
1956 Label no_jump; 2026 Label no_jump;
1957 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition. 2027 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition.
1958 jmp(label); 2028 jmp(label);
1959 Bind(&no_jump); 2029 Bind(&no_jump);
1960 } 2030 }
1961 2031
1962 2032
2033 void Assembler::J(Condition condition, const ExternalLabel* label,
2034 Register pp) {
2035 Label no_jump;
2036 j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition.
2037 Jmp(label, pp);
2038 Bind(&no_jump);
2039 }
2040
2041
1963 void Assembler::jmp(Register reg) { 2042 void Assembler::jmp(Register reg) {
1964 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2043 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1965 Operand operand(reg); 2044 Operand operand(reg);
1966 EmitOperandREX(4, operand, REX_NONE); 2045 EmitOperandREX(4, operand, REX_NONE);
1967 EmitUint8(0xFF); 2046 EmitUint8(0xFF);
1968 EmitOperand(4, operand); 2047 EmitOperand(4, operand);
1969 } 2048 }
1970 2049
1971 2050
1972 void Assembler::jmp(Label* label, bool near) { 2051 void Assembler::jmp(Label* label, bool near) {
(...skipping 15 matching lines...)
1988 EmitNearLabelLink(label); 2067 EmitNearLabelLink(label);
1989 } else { 2068 } else {
1990 EmitUint8(0xE9); 2069 EmitUint8(0xE9);
1991 EmitLabelLink(label); 2070 EmitLabelLink(label);
1992 } 2071 }
1993 } 2072 }
1994 2073
1995 2074
1996 void Assembler::jmp(const ExternalLabel* label) { 2075 void Assembler::jmp(const ExternalLabel* label) {
1997 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2076 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1998 intptr_t call_start = buffer_.GetPosition();
1999 2077
2000 // Encode movq(TMP, Immediate(label->address())), but always as imm64. 2078 // Encode movq(TMP, Immediate(label->address())), but always as imm64.
2001 EmitRegisterREX(TMP, REX_W); 2079 EmitRegisterREX(TMP, REX_W);
2002 EmitUint8(0xB8 | (TMP & 7)); 2080 EmitUint8(0xB8 | (TMP & 7));
2003 EmitInt64(label->address()); 2081 EmitInt64(label->address());
2004 2082
2005 // Encode jmp(TMP). 2083 // Encode jmp(TMP).
2006 Operand operand(TMP); 2084 Operand operand(TMP);
2007 EmitOperandREX(4, operand, REX_NONE); 2085 EmitOperandREX(4, operand, REX_NONE);
2008 EmitUint8(0xFF); 2086 EmitUint8(0xFF);
2009 EmitOperand(4, operand); 2087 EmitOperand(4, operand);
2088 }
2010 2089
2090
2091 void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
2092 intptr_t call_start = buffer_.GetPosition();
2093 LoadExternalLabel(TMP, label, kPatchable, pp);
2094 {
2095 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2096 // Encode jmp(TMP).
Florian Schneider 2013/09/06 09:58:15 jmp(TMP)
zra 2013/09/06 17:53:26 Done.
2097 Operand operand(TMP);
2098 EmitOperandREX(4, operand, REX_NONE);
2099 EmitUint8(0xFF);
2100 EmitOperand(4, operand);
2101 }
2011 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); 2102 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
2012 } 2103 }
2013 2104
2014 2105
2106 void Assembler::Jmp(const ExternalLabel* label, Register pp) {
2107 LoadExternalLabel(TMP, label, kNotPatchable, pp);
2108 {
Florian Schneider 2013/09/06 09:58:15 jmp(TMP)
zra 2013/09/06 17:53:26 Done.
2109 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2110 // Encode jmp(TMP).
2111 Operand operand(TMP);
2112 EmitOperandREX(4, operand, REX_NONE);
2113 EmitUint8(0xFF);
2114 EmitOperand(4, operand);
2115 }
2116 }
2117
2118
2015 void Assembler::lock() { 2119 void Assembler::lock() {
2016 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2120 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2017 EmitUint8(0xF0); 2121 EmitUint8(0xF0);
2018 } 2122 }
2019 2123
2020 2124
2021 void Assembler::cmpxchgl(const Address& address, Register reg) { 2125 void Assembler::cmpxchgl(const Address& address, Register reg) {
2022 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2126 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2023 EmitOperandREX(reg, address, REX_NONE); 2127 EmitOperandREX(reg, address, REX_NONE);
2024 EmitUint8(0x0F); 2128 EmitUint8(0x0F);
(...skipping 59 matching lines...)
2084 if (stack_elements <= 4) { 2188 if (stack_elements <= 4) {
2085 for (intptr_t i = 0; i < stack_elements; i++) { 2189 for (intptr_t i = 0; i < stack_elements; i++) {
2086 popq(TMP); 2190 popq(TMP);
2087 } 2191 }
2088 return; 2192 return;
2089 } 2193 }
2090 addq(RSP, Immediate(stack_elements * kWordSize)); 2194 addq(RSP, Immediate(stack_elements * kWordSize));
2091 } 2195 }
2092 2196
2093 2197
2094 void Assembler::LoadObject(Register dst, const Object& object) { 2198 intptr_t Assembler::FindObject(const Object& obj) {
2095 if (object.IsSmi() || object.InVMHeap()) { 2199 // The object pool cannot be used in the vm isolate.
2200 ASSERT(Isolate::Current() != Dart::vm_isolate());
2201 ASSERT(obj.IsNotTemporaryScopedHandle());
2202 ASSERT(obj.IsOld());
2203 ASSERT(!object_pool_.IsNull());
2204 // TODO(zra): This can be slow. Add a hash map from obj.raw() to
2205 // object pool indexes to speed lookup.
2206 for (int i = 0; i < object_pool_.Length(); i++) {
2207 if (object_pool_.At(i) == obj.raw()) {
2208 return i;
2209 }
2210 }
2211 object_pool_.Add(obj, Heap::kOld);
2212 return object_pool_.Length() - 1;
2213 }
2214
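The TODO above flags the linear scan as a potential bottleneck. A self-contained sketch of the suggested cache, keyed on the raw object pointer (std::unordered_map is used only for illustration; the VM would likely use its own hash map utility, and only old-space objects, which the collector does not move, are safe to key by address):

    #include <cstdint>
    #include <unordered_map>
    #include <vector>

    class ObjectPoolIndexCache {
     public:
      // Returns the existing pool index for 'raw', or appends a new slot.
      intptr_t FindOrAdd(uintptr_t raw) {
        auto it = index_of_.find(raw);
        if (it != index_of_.end()) return it->second;
        pool_.push_back(raw);
        const intptr_t index = static_cast<intptr_t>(pool_.size()) - 1;
        index_of_[raw] = index;
        return index;
      }

     private:
      std::vector<uintptr_t> pool_;                       // stands in for object_pool_
      std::unordered_map<uintptr_t, intptr_t> index_of_;  // raw pointer -> pool index
    };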
2215
2216 intptr_t Assembler::FindExternalLabel(const ExternalLabel* label,
2217 Patchability patchable) {
srdjan 2013/09/06 16:22:44 fix indent
zra 2013/09/06 17:53:26 Done.
2218 // The object pool cannot be used in the vm isolate.
2219 ASSERT(Isolate::Current() != Dart::vm_isolate());
2220 ASSERT(!object_pool_.IsNull());
2221 const uword address = label->address();
2222 ASSERT(Utils::IsAligned(address, 4));
2223 // The address is stored in the object array as a RawSmi.
2224 const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
2225 if (patchable == kNotPatchable) {
2226 // An external label used in a non-patchable call shouldn't also be used in
2227 // patchable calls. So, we can re-use existing entries for non-patchable
2228 // calls.
Florian Schneider 2013/09/06 09:58:15 I find the whole approach a little brittle: Forget
zra 2013/09/06 17:53:26 It is equivalent to check that a patchable externa
2229 // TODO(zra): This can be slow. Add a hash map from obj.raw() to
2230 // object pool indexes to speed lookup.
2231 for (int i = 0; i < object_pool_.Length(); i++) {
2232 if (object_pool_.At(i) == smi.raw()) {
2233 return i;
2234 }
2235 }
srdjan 2013/09/06 16:22:44 Could you replace this code with call to Assembler
zra 2013/09/06 17:53:26 Done.
2236 }
2237 // If the call is patchable, do not reuse an existing entry since each
2238 // reference may be patched independently.
2239 object_pool_.Add(smi, Heap::kOld);
2240 return object_pool_.Length() - 1;
2241 }
2242
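FindExternalLabel repeats the same linear scan as FindObject for the kNotPatchable case; the review above asks for the duplication to be removed. One way the search could be shared (FindRawObjectIndex is a hypothetical helper name, not from this patch):

    // Hypothetical shared lookup; returns -1 if 'raw' is not in the pool yet.
    intptr_t Assembler::FindRawObjectIndex(RawObject* raw) {
      for (int i = 0; i < object_pool_.Length(); i++) {
        if (object_pool_.At(i) == raw) return i;
      }
      return -1;
    }

FindObject and the kNotPatchable branch of FindExternalLabel could then both call this helper and append only when it returns -1; patchable call sites would still always append a fresh entry so each one can be patched independently.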
2243
2244 bool Assembler::CanLoadFromObjectPool(const Object& object) {
2245 return !object.IsSmi() && // Not a Smi
2246 // Not in the VMHeap, OR is one of the VMHeap objects we put in every
2247 // object pool.
2248 (!object.InVMHeap() || (object.raw() == Object::null()) ||
2249 (object.raw() == Bool::True().raw()) ||
2250 (object.raw() == Bool::False().raw())) &&
2251 object.IsNotTemporaryScopedHandle() &&
2252 object.IsOld();
2253 }
2254
2255
2256 void Assembler::LoadWordFromPoolOffset(Register dst, Register pp,
2257 int32_t offset) {
2258 // This sequence must be of fixed size. The 'true' argument to the Address
2259 // constructor forces it to use a fixed size encoding.
2260 movq(dst, Address(pp, offset, true));
2261 }
2262
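The fixed-size requirement exists because the surrounding patchable sequences assert an exact byte length (kCallExternalLabelSize), which an encoding that varies with the pool index would break. An illustrative contrast (exact byte counts depend on the registers involved):

    // movq dst, [PP + 0x08]     ; a small offset would normally use a disp8 encoding
    // movq dst, [PP + 0x12345]  ; a large offset needs a disp32 encoding
    // Passing 'true' to Address() forces disp32 in both cases, so the pool load,
    // and therefore CallPatchable/JmpPatchable, always occupy the same number of bytes.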
2263
2264 void Assembler::LoadObjectFromPool(Register dst, const Object& object,
2265 Register pp) {
2266 if (CanLoadFromObjectPool(object)) {
2267 const int32_t offset = Array::element_offset(FindObject(object));
2268 LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
2269 } else {
2096 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); 2270 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2097 } else {
2098 ASSERT(object.IsNotTemporaryScopedHandle());
2099 ASSERT(object.IsOld());
2100 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2101 EmitRegisterREX(dst, REX_W);
2102 EmitUint8(0xB8 | (dst & 7));
2103 buffer_.EmitObject(object);
2104 } 2271 }
2105 } 2272 }
2106 2273
2107 2274
2108 void Assembler::StoreObject(const Address& dst, const Object& object) { 2275 void Assembler::StoreObject(const Address& dst, const Object& object) {
2109 if (object.IsSmi() || object.InVMHeap()) { 2276 if (CanLoadFromObjectPool(object)) {
2110 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2111 } else {
2112 ASSERT(object.IsNotTemporaryScopedHandle());
2113 ASSERT(object.IsOld());
2114 LoadObject(TMP, object); 2277 LoadObject(TMP, object);
2115 movq(dst, TMP); 2278 movq(dst, TMP);
2279 } else {
2280 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2116 } 2281 }
2117 } 2282 }
2118 2283
2119 2284
2120 void Assembler::PushObject(const Object& object) { 2285 void Assembler::PushObject(const Object& object) {
2121 if (object.IsSmi() || object.InVMHeap()) { 2286 if (CanLoadFromObjectPool(object)) {
2122 pushq(Immediate(reinterpret_cast<int64_t>(object.raw())));
2123 } else {
2124 LoadObject(TMP, object); 2287 LoadObject(TMP, object);
2125 pushq(TMP); 2288 pushq(TMP);
2289 } else {
2290 pushq(Immediate(reinterpret_cast<int64_t>(object.raw())));
2126 } 2291 }
2127 } 2292 }
2128 2293
2129 2294
2130 void Assembler::CompareObject(Register reg, const Object& object) { 2295 void Assembler::CompareObject(Register reg, const Object& object) {
2131 if (object.IsSmi() || object.InVMHeap()) { 2296 if (CanLoadFromObjectPool(object)) {
2132 cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw())));
2133 } else {
2134 ASSERT(reg != TMP); 2297 ASSERT(reg != TMP);
2135 LoadObject(TMP, object); 2298 LoadObject(TMP, object);
2136 cmpq(reg, TMP); 2299 cmpq(reg, TMP);
2300 } else {
2301 cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw())));
2137 } 2302 }
2138 } 2303 }
2139 2304
2140 2305
2141 // Destroys the value register. 2306 // Destroys the value register.
2142 void Assembler::StoreIntoObjectFilterNoSmi(Register object, 2307 void Assembler::StoreIntoObjectFilterNoSmi(Register object,
2143 Register value, 2308 Register value,
2144 Label* no_update) { 2309 Label* no_update) {
2145 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) && 2310 COMPILE_ASSERT((kNewObjectAlignmentOffset == kWordSize) &&
2146 (kOldObjectAlignmentOffset == 0), young_alignment); 2311 (kOldObjectAlignmentOffset == 0), young_alignment);
(...skipping 42 matching lines...)
2189 if (can_value_be_smi) { 2354 if (can_value_be_smi) {
2190 StoreIntoObjectFilter(object, value, &done); 2355 StoreIntoObjectFilter(object, value, &done);
2191 } else { 2356 } else {
2192 StoreIntoObjectFilterNoSmi(object, value, &done); 2357 StoreIntoObjectFilterNoSmi(object, value, &done);
2193 } 2358 }
2194 // A store buffer update is required. 2359 // A store buffer update is required.
2195 if (value != RAX) pushq(RAX); 2360 if (value != RAX) pushq(RAX);
2196 if (object != RAX) { 2361 if (object != RAX) {
2197 movq(RAX, object); 2362 movq(RAX, object);
2198 } 2363 }
2199 call(&StubCode::UpdateStoreBufferLabel()); 2364 Call(&StubCode::UpdateStoreBufferLabel(), PP);
2200 if (value != RAX) popq(RAX); 2365 if (value != RAX) popq(RAX);
2201 Bind(&done); 2366 Bind(&done);
2202 } 2367 }
2203 2368
2204 2369
2205 void Assembler::StoreIntoObjectNoBarrier(Register object, 2370 void Assembler::StoreIntoObjectNoBarrier(Register object,
2206 const Address& dest, 2371 const Address& dest,
2207 Register value) { 2372 Register value) {
2208 movq(dest, value); 2373 movq(dest, value);
2209 #if defined(DEBUG) 2374 #if defined(DEBUG)
(...skipping 81 matching lines...)
2291 } 2456 }
2292 } 2457 }
2293 2458
2294 2459
2295 void Assembler::LeaveFrame() { 2460 void Assembler::LeaveFrame() {
2296 movq(RSP, RBP); 2461 movq(RSP, RBP);
2297 popq(RBP); 2462 popq(RBP);
2298 } 2463 }
2299 2464
2300 2465
2466 void Assembler::LeaveFrameWithPP() {
2467 movq(PP, Address(RBP, -2 * kWordSize));
2468 LeaveFrame();
2469 }
2470
2471
2472 void Assembler::ReturnPatchable() {
2473 // This sequence must have a fixed size so that it can be patched by the
2474 // debugger.
2475 intptr_t start = buffer_.GetPosition();
2476 LeaveFrameWithPP();
2477 ret();
2478 nop(4);
2479 ASSERT((buffer_.GetPosition() - start) == 13);
2480 }
2481
2482
2301 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { 2483 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
2302 // Reserve space for arguments and align frame before entering 2484 // Reserve space for arguments and align frame before entering
2303 // the C++ world. 2485 // the C++ world.
2304 AddImmediate(RSP, Immediate(-frame_space)); 2486 AddImmediate(RSP, Immediate(-frame_space));
2305 if (OS::ActivationFrameAlignment() > 1) { 2487 if (OS::ActivationFrameAlignment() > 1) {
2306 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); 2488 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
2307 } 2489 }
2308 } 2490 }
2309 2491
2310 2492
(...skipping 60 matching lines...)
2371 leave(); 2553 leave();
2372 } 2554 }
2373 2555
2374 2556
2375 void Assembler::CallRuntime(const RuntimeEntry& entry, 2557 void Assembler::CallRuntime(const RuntimeEntry& entry,
2376 intptr_t argument_count) { 2558 intptr_t argument_count) {
2377 entry.Call(this, argument_count); 2559 entry.Call(this, argument_count);
2378 } 2560 }
2379 2561
2380 2562
2563 void Assembler::LoadPoolPointer(Register pp) {
2564 Label next;
2565 call(&next);
2566 Bind(&next);
2567
2568 // Load new pool pointer.
2569 const intptr_t object_pool_pc_dist =
2570 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2571 CodeSize();
2572 popq(pp);
2573 movq(pp, Address(pp, -object_pool_pc_dist));
2574 }
2575
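LoadPoolPointer materializes the current PC with a call/pop pair and then walks backwards to the object_pool_ field of the enclosing Instructions object. The arithmetic, spelled out (all names are from the code above):

    // After popq(pp):
    //   pp == instructions_payload_start + CodeSize()
    // The RawInstructions header starts HeaderSize() bytes before the payload, and its
    // object_pool_ field lives object_pool_offset() bytes into the header, so:
    //   &object_pool_ == pp - (HeaderSize() - object_pool_offset() + CodeSize())
    //                 == pp - object_pool_pc_dist
    // which is exactly the Address(pp, -object_pool_pc_dist) load above.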
2576
2381 void Assembler::EnterDartFrame(intptr_t frame_size) { 2577 void Assembler::EnterDartFrame(intptr_t frame_size) {
2382 EnterFrame(0); 2578 EnterFrame(0);
2579
2383 Label dart_entry; 2580 Label dart_entry;
2384 call(&dart_entry); 2581 call(&dart_entry);
2385 Bind(&dart_entry); 2582 Bind(&dart_entry);
2386 // The runtime system assumes that the code marker address is 2583 // The runtime system assumes that the code marker address is
2387 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code 2584 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code
2388 // generated before entering the frame, the address needs to be adjusted. 2585 // generated before entering the frame, the address needs to be adjusted.
2586 const intptr_t object_pool_pc_dist =
2587 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2588 CodeSize();
2389 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); 2589 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
2390 if (offset != 0) { 2590 if (offset != 0) {
2391 addq(Address(RSP, 0), Immediate(offset)); 2591 addq(Address(RSP, 0), Immediate(offset));
2392 } 2592 }
2593 // Save caller's pool pointer
2594 pushq(PP);
2595
2596 // Load callee's pool pointer.
2597 movq(PP, Address(RSP, 1 * kWordSize));
2598 movq(PP, Address(PP, -object_pool_pc_dist - offset));
2599
2600 if (frame_size != 0) {
2601 subq(RSP, Immediate(frame_size));
2602 }
2603 }
2604
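EnterDartFrame and LeaveFrameWithPP have to agree on where the caller's pool pointer is spilled. The frame layout the code above produces (offsets in words from RBP):

    //   RBP + 1*kWordSize : return address
    //   RBP + 0           : saved caller RBP            (EnterFrame)
    //   RBP - 1*kWordSize : PC marker                   (call/Bind, then addq adjustment)
    //   RBP - 2*kWordSize : caller's pool pointer (PP)  (pushq(PP))
    //   below that        : frame_size bytes of spill slots
    // LeaveFrameWithPP restores PP from RBP - 2*kWordSize, matching the pushq above.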
2605
2606 void Assembler::EnterDartFrameWithInfo(intptr_t frame_size,
2607 Register new_pp, Register new_pc) {
2608 if (new_pc == kNoRegister) {
2609 EnterDartFrame(0);
2610 } else {
2611 EnterFrame(0);
2612 pushq(new_pc);
2613 pushq(PP);
2614 movq(PP, new_pp);
2615 }
2393 if (frame_size != 0) { 2616 if (frame_size != 0) {
2394 subq(RSP, Immediate(frame_size)); 2617 subq(RSP, Immediate(frame_size));
2395 } 2618 }
2396 } 2619 }
2397 2620
2398 2621
2399 // On entry to a function compiled for OSR, the caller's frame pointer, the 2622 // On entry to a function compiled for OSR, the caller's frame pointer, the
2400 // stack locals, and any copied parameters are already in place. The frame 2623 // stack locals, and any copied parameters are already in place. The frame
2401 // pointer is already set up. The PC marker is not correct for the 2624 // pointer is already set up. The PC marker is not correct for the
2402 // optimized function and there may be extra space for spill slots to 2625 // optimized function and there may be extra space for spill slots to
2403 // allocate. 2626 // allocate.
2404 void Assembler::EnterOsrFrame(intptr_t extra_size) { 2627 void Assembler::EnterOsrFrame(intptr_t extra_size,
2405 Label dart_entry; 2628 Register new_pp, Register new_pc) {
2406 call(&dart_entry); 2629 if (new_pc == kNoRegister) {
2407 Bind(&dart_entry); 2630 Label dart_entry;
2408 // The runtime system assumes that the code marker address is 2631 call(&dart_entry);
2409 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no 2632 Bind(&dart_entry);
2410 // code to set up the frame pointer, the address needs to be adjusted. 2633 // The runtime system assumes that the code marker address is
2411 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); 2634 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no
2412 if (offset != 0) { 2635 // code to set up the frame pointer, the address needs to be adjusted.
2413 addq(Address(RSP, 0), Immediate(offset)); 2636 const intptr_t object_pool_pc_dist =
2637 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2638 CodeSize();
2639 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
2640 if (offset != 0) {
2641 addq(Address(RSP, 0), Immediate(offset));
2642 }
2643
2644 // Load callee's pool pointer.
2645 movq(PP, Address(RSP, 0));
2646 movq(PP, Address(PP, -object_pool_pc_dist - offset));
2647
2648 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize));
2649 } else {
2650 movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), new_pc);
2651 movq(PP, new_pp);
2414 } 2652 }
2415 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize));
2416 if (extra_size != 0) { 2653 if (extra_size != 0) {
2417 subq(RSP, Immediate(extra_size)); 2654 subq(RSP, Immediate(extra_size));
2418 } 2655 }
2419 } 2656 }
2420 2657
2421 2658
2422 void Assembler::EnterStubFrame() { 2659 void Assembler::EnterStubFrame() {
2423 EnterFrame(0); 2660 EnterFrame(0);
2424 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. 2661 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames.
2425 } 2662 }
2426 2663
2427 2664
2665 void Assembler::EnterStubFrameWithPP() {
2666 EnterFrame(0);
2667 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames.
2668 pushq(PP); // Save caller's pool pointer
2669 LoadPoolPointer(PP);
2670 }
2671
2672
2428 void Assembler::TryAllocate(const Class& cls, 2673 void Assembler::TryAllocate(const Class& cls,
2429 Label* failure, 2674 Label* failure,
2430 bool near_jump, 2675 bool near_jump,
2431 Register instance_reg) { 2676 Register instance_reg) {
2432 ASSERT(failure != NULL); 2677 ASSERT(failure != NULL);
2433 if (FLAG_inline_alloc) { 2678 if (FLAG_inline_alloc) {
2434 Heap* heap = Isolate::Current()->heap(); 2679 Heap* heap = Isolate::Current()->heap();
2435 const intptr_t instance_size = cls.instance_size(); 2680 const intptr_t instance_size = cls.instance_size();
2436 movq(TMP, Immediate(heap->TopAddress())); 2681 movq(TMP, Immediate(heap->TopAddress()));
2437 movq(instance_reg, Address(TMP, 0)); 2682 movq(instance_reg, Address(TMP, 0));
(...skipping 203 matching lines...)
2641 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15" 2886 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
2642 }; 2887 };
2643 2888
2644 2889
2645 const char* Assembler::FpuRegisterName(FpuRegister reg) { 2890 const char* Assembler::FpuRegisterName(FpuRegister reg) {
2646 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); 2891 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters));
2647 return xmm_reg_names[reg]; 2892 return xmm_reg_names[reg];
2648 } 2893 }
2649 2894
2650 2895
2896 void Assembler::FixedSizeAddressOffsetTest() {
2897 movq(RAX, Address(RSP, 0, true));
2898 movq(RAX, Address(RBP, 0, true));
2899 movq(RAX, Address(RAX, 0, true));
2900 movq(RAX, Address(R10, 0, true));
2901 movq(RAX, Address(R12, 0, true));
2902 movq(RAX, Address(R13, 0, true));
2903 movq(R10, Address(RAX, 0, true));
2904
2905 movq(RAX, Address(RSP, kWordSize, true));
2906 movq(RAX, Address(RBP, kWordSize, true));
2907 movq(RAX, Address(RAX, kWordSize, true));
2908 movq(RAX, Address(R10, kWordSize, true));
2909 movq(RAX, Address(R12, kWordSize, true));
2910 movq(RAX, Address(R13, kWordSize, true));
2911
2912 movq(RAX, Address(RSP, -kWordSize, true));
2913 movq(RAX, Address(RBP, -kWordSize, true));
2914 movq(RAX, Address(RAX, -kWordSize, true));
2915 movq(RAX, Address(R10, -kWordSize, true));
2916 movq(RAX, Address(R12, -kWordSize, true));
2917 movq(RAX, Address(R13, -kWordSize, true));
2918 }
2919
2651 } // namespace dart 2920 } // namespace dart
2652 2921
2653 #endif // defined TARGET_ARCH_X64 2922 #endif // defined TARGET_ARCH_X64