OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 33 matching lines...)
44 | 44 |
45 __ SetPrologueOffset(); | 45 __ SetPrologueOffset(); |
46 __ Comment("CallToRuntimeStub"); | 46 __ Comment("CallToRuntimeStub"); |
47 __ EnterFrame(0); | 47 __ EnterFrame(0); |
48 | 48 |
49 // Load current Isolate pointer from Context structure into R0. | 49 // Load current Isolate pointer from Context structure into R0. |
50 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); | 50 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); |
51 | 51 |
52 // Save exit frame information to enable stack walking as we are about | 52 // Save exit frame information to enable stack walking as we are about |
53 // to transition to Dart VM C++ code. | 53 // to transition to Dart VM C++ code. |
54 __ mov(TMP, SP); // Can't directly store SP. | 54 __ StoreToOffset(SP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); |
55 __ StoreToOffset(TMP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); | |
56 | 55 |
57 // Save current Context pointer into Isolate structure. | 56 // Save current Context pointer into Isolate structure. |
58 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); | 57 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); |
59 | 58 |
60 // Cache Isolate pointer into CTX while executing runtime code. | 59 // Cache Isolate pointer into CTX while executing runtime code. |
61 __ mov(CTX, R0); | 60 __ mov(CTX, R0); |
62 | 61 |
63 #if defined(DEBUG) | 62 #if defined(DEBUG) |
64 { Label ok; | 63 { Label ok; |
65 // Check that we are always entering from Dart code. | 64 // Check that we are always entering from Dart code. |
66 __ LoadFromOffset(R8, R0, Isolate::vm_tag_offset(), kNoPP); | 65 __ LoadFromOffset(R8, R0, Isolate::vm_tag_offset(), kNoPP); |
67 __ CompareImmediate(R8, VMTag::kScriptTagId, kNoPP); | 66 __ CompareImmediate(R8, VMTag::kScriptTagId, kNoPP); |
68 __ b(&ok, EQ); | 67 __ b(&ok, EQ); |
69 __ Stop("Not coming from Dart code."); | 68 __ Stop("Not coming from Dart code."); |
70 __ Bind(&ok); | 69 __ Bind(&ok); |
71 } | 70 } |
72 #endif | 71 #endif |
73 | 72 |
74 // Mark that the isolate is executing VM code. | 73 // Mark that the isolate is executing VM code. |
75 __ StoreToOffset(R5, R0, Isolate::vm_tag_offset(), kNoPP); | 74 __ StoreToOffset(R5, R0, Isolate::vm_tag_offset(), kNoPP); |
76 | 75 |
77 // Reserve space for arguments and align frame before entering C++ world. | 76 // Reserve space for arguments and align frame before entering C++ world. |
78 // NativeArguments are passed in registers. | 77 // NativeArguments are passed in registers. |
79 __ Comment("align stack"); | 78 __ Comment("align stack"); |
| 79 // Reserve space for arguments. |
80 ASSERT(sizeof(NativeArguments) == 4 * kWordSize); | 80 ASSERT(sizeof(NativeArguments) == 4 * kWordSize); |
81 __ ReserveAlignedFrameSpace(4 * kWordSize); // Reserve space for arguments. | 81 __ ReserveAlignedFrameSpace(sizeof(NativeArguments)); |
82 | 82 |
83 // Pass NativeArguments structure by value and call runtime. | 83 // Pass NativeArguments structure by value and call runtime. |
84 // Registers R0, R1, R2, and R3 are used. | 84 // Registers R0, R1, R2, and R3 are used. |
85 | 85 |
86 ASSERT(isolate_offset == 0 * kWordSize); | 86 ASSERT(isolate_offset == 0 * kWordSize); |
87 // Set isolate in NativeArgs: R0 already contains CTX. | 87 // Set isolate in NativeArgs: R0 already contains CTX. |
88 | 88 |
89 // There are no runtime calls to closures, so we do not need to set the tag | 89 // There are no runtime calls to closures, so we do not need to set the tag |
90 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. | 90 // bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_. |
91 ASSERT(argc_tag_offset == 1 * kWordSize); | 91 ASSERT(argc_tag_offset == 1 * kWordSize); |
92 __ mov(R1, R4); // Set argc in NativeArguments. | 92 __ mov(R1, R4); // Set argc in NativeArguments. |
93 | 93 |
94 ASSERT(argv_offset == 2 * kWordSize); | 94 ASSERT(argv_offset == 2 * kWordSize); |
95 __ add(R2, ZR, Operand(R4, LSL, 3)); | 95 __ add(R2, ZR, Operand(R4, LSL, 3)); |
96 __ add(R2, FP, Operand(R2)); // Compute argv. | 96 __ add(R2, FP, Operand(R2)); // Compute argv. |
97 // Set argv in NativeArguments. | 97 // Set argv in NativeArguments. |
98 __ AddImmediate(R2, R2, exitframe_last_param_slot_from_fp * kWordSize, | 98 __ AddImmediate(R2, R2, exitframe_last_param_slot_from_fp * kWordSize, |
99 kNoPP); | 99 kNoPP); |
100 | 100 |
101 ASSERT(retval_offset == 3 * kWordSize); | 101 ASSERT(retval_offset == 3 * kWordSize); |
102 __ AddImmediate(R3, R2, kWordSize, kNoPP); | 102 __ AddImmediate(R3, R2, kWordSize, kNoPP); |
103 | 103 |
104 // TODO(zra): Check that the ABI allows calling through this register. | 104 __ StoreToOffset(R0, SP, isolate_offset, kNoPP); |
| 105 __ StoreToOffset(R1, SP, argc_tag_offset, kNoPP); |
| 106 __ StoreToOffset(R2, SP, argv_offset, kNoPP); |
| 107 __ StoreToOffset(R3, SP, retval_offset, kNoPP); |
| 108 __ mov(R0, SP); // Pass the pointer to the NativeArguments. |
| 109 |
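Note on the block built above: the runtime call now fills a NativeArguments record on the stack and passes its address in R0 (the old code passed the four fields by value in R0-R3). A minimal sketch of the layout the four StoreToOffset calls assume, with simplified field types; the struct and field names here are illustrative stand-ins, not the VM's actual declaration:

    #include <cstdint>

    struct NativeArgumentsSketch {   // stand-in for dart::NativeArguments
      uint64_t isolate;   // SP + 0 * kWordSize  <- R0, the cached Isolate pointer
      uint64_t argc_tag;  // SP + 1 * kWordSize  <- R1, argument count plus tag bits
      uint64_t argv;      // SP + 2 * kWordSize  <- R2, FP + argc * 8 + last-param slot offset
      uint64_t retval;    // SP + 3 * kWordSize  <- R3, one word above argv
    };
    static_assert(sizeof(NativeArgumentsSketch) == 4 * sizeof(uint64_t),
                  "mirrors ASSERT(sizeof(NativeArguments) == 4 * kWordSize)");

The mov(R0, SP) that follows the stores is what hands this record to the C++ entry point as a pointer.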
| 110 // We are entering runtime code, so the C stack pointer must be restored from |
| 111 // the stack limit to the top of the stack. We cache the stack limit address |
| 112 // in a callee-saved register. |
| 113 __ mov(R26, CSP); |
| 114 __ mov(CSP, SP); |
| 115 |
105 __ blr(R5); | 116 __ blr(R5); |
| 117 __ Comment("CallToRuntimeStub return"); |
| 118 |
| 119 // Restore SP and CSP. |
| 120 __ mov(SP, CSP); |
| 121 __ mov(CSP, R26); |
106 | 122 |
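The two mov pairs around blr(R5) form one symmetric pattern that this CL repeats at every exit from Dart code into C/C++: stash the stack-limit value that CSP carries while Dart code runs, point CSP at the real top of stack for the duration of the call, then undo both moves. A sketch of that pairing factored into helpers, assuming this file's "#define __ assembler->" shorthand; EnterCCall and LeaveCCall are illustrative names, not VM APIs:

    // Illustrative only; the stubs inline these moves rather than calling helpers.
    static void EnterCCall(Assembler* assembler) {
      __ mov(R26, CSP);  // Save the stack limit that CSP holds while in Dart code.
      __ mov(CSP, SP);   // Let the C/C++ callee see the real top of stack.
    }

    static void LeaveCCall(Assembler* assembler) {
      __ mov(SP, CSP);   // The C ABI restores CSP on return, so it is top of stack again.
      __ mov(CSP, R26);  // Put the cached stack limit back into CSP.
    }

R26 is one of the AAPCS64 callee-saved registers (R19-R28), so the cached value survives the call.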
107 // Retval is next to 1st argument. | 123 // Retval is next to 1st argument. |
108 __ Comment("CallToRuntimeStub return"); | |
109 | |
110 // Mark that the isolate is executing Dart code. | 124 // Mark that the isolate is executing Dart code. |
111 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); | 125 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); |
112 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); | 126 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); |
113 | 127 |
114 // Reset exit frame information in Isolate structure. | 128 // Reset exit frame information in Isolate structure. |
115 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); | 129 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); |
116 | 130 |
117 // Load Context pointer from Isolate structure into R2. | 131 // Load Context pointer from Isolate structure into R2. |
118 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); | 132 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); |
119 | 133 |
(...skipping 28 matching lines...)
148 const intptr_t argv_offset = NativeArguments::argv_offset(); | 162 const intptr_t argv_offset = NativeArguments::argv_offset(); |
149 const intptr_t retval_offset = NativeArguments::retval_offset(); | 163 const intptr_t retval_offset = NativeArguments::retval_offset(); |
150 | 164 |
151 __ EnterFrame(0); | 165 __ EnterFrame(0); |
152 | 166 |
153 // Load current Isolate pointer from Context structure into R0. | 167 // Load current Isolate pointer from Context structure into R0. |
154 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); | 168 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); |
155 | 169 |
156 // Save exit frame information to enable stack walking as we are about | 170 // Save exit frame information to enable stack walking as we are about |
157 // to transition to native code. | 171 // to transition to native code. |
158 __ mov(TMP, SP); | 172 __ StoreToOffset(SP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); |
159 __ StoreToOffset(TMP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); | |
160 | 173 |
161 // Save current Context pointer into Isolate structure. | 174 // Save current Context pointer into Isolate structure. |
162 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); | 175 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); |
163 | 176 |
164 // Cache Isolate pointer into CTX while executing native code. | 177 // Cache Isolate pointer into CTX while executing native code. |
165 __ mov(CTX, R0); | 178 __ mov(CTX, R0); |
166 | 179 |
167 #if defined(DEBUG) | 180 #if defined(DEBUG) |
168 { Label ok; | 181 { Label ok; |
169 // Check that we are always entering from Dart code. | 182 // Check that we are always entering from Dart code. |
(...skipping 33 matching lines...)
203 | 216 |
204 // Passing the structure by value as in runtime calls would require changing | 217 // Passing the structure by value as in runtime calls would require changing |
205 // Dart API for native functions. | 218 // Dart API for native functions. |
206 // For now, space is reserved on the stack and we pass a pointer to it. | 219 // For now, space is reserved on the stack and we pass a pointer to it. |
207 __ StoreToOffset(R0, SP, isolate_offset, kNoPP); | 220 __ StoreToOffset(R0, SP, isolate_offset, kNoPP); |
208 __ StoreToOffset(R1, SP, argc_tag_offset, kNoPP); | 221 __ StoreToOffset(R1, SP, argc_tag_offset, kNoPP); |
209 __ StoreToOffset(R2, SP, argv_offset, kNoPP); | 222 __ StoreToOffset(R2, SP, argv_offset, kNoPP); |
210 __ StoreToOffset(R3, SP, retval_offset, kNoPP); | 223 __ StoreToOffset(R3, SP, retval_offset, kNoPP); |
211 __ mov(R0, SP); // Pass the pointer to the NativeArguments. | 224 __ mov(R0, SP); // Pass the pointer to the NativeArguments. |
212 | 225 |
| 226 // We are entering native code, so the C stack pointer must be restored from |
| 227 // the stack limit to the top of the stack. We cache the stack limit address |
| 228 // in a callee-saved register. |
| 229 __ mov(R26, CSP); |
| 230 __ mov(CSP, SP); |
| 231 |
213 // Call native function (sets up scope if not leaf function). | 232 // Call native function (sets up scope if not leaf function). |
214 Label leaf_call; | 233 Label leaf_call; |
215 Label done; | 234 Label done; |
216 __ TestImmediate(R1, NativeArguments::AutoSetupScopeMask(), kNoPP); | 235 __ TestImmediate(R1, NativeArguments::AutoSetupScopeMask(), kNoPP); |
217 __ b(&leaf_call, EQ); | 236 __ b(&leaf_call, EQ); |
218 | 237 |
219 __ mov(R1, R5); // Pass the function entrypoint to call. | 238 __ mov(R1, R5); // Pass the function entrypoint to call. |
220 // Call native function invocation wrapper or redirection via simulator. | 239 // Call native function invocation wrapper or redirection via simulator. |
221 #if defined(USING_SIMULATOR) | 240 #if defined(USING_SIMULATOR) |
222 uword entry = reinterpret_cast<uword>(NativeEntry::NativeCallWrapper); | 241 uword entry = reinterpret_cast<uword>(NativeEntry::NativeCallWrapper); |
223 entry = Simulator::RedirectExternalReference( | 242 entry = Simulator::RedirectExternalReference( |
224 entry, Simulator::kNativeCall, NativeEntry::kNumCallWrapperArguments); | 243 entry, Simulator::kNativeCall, NativeEntry::kNumCallWrapperArguments); |
225 __ LoadImmediate(R2, entry, kNoPP); | 244 __ LoadImmediate(R2, entry, kNoPP); |
226 __ blr(R2); | 245 __ blr(R2); |
227 #else | 246 #else |
228 __ BranchLink(&NativeEntry::NativeCallWrapperLabel(), kNoPP); | 247 __ BranchLink(&NativeEntry::NativeCallWrapperLabel(), kNoPP); |
229 #endif | 248 #endif |
230 __ b(&done); | 249 __ b(&done); |
231 | 250 |
232 __ Bind(&leaf_call); | 251 __ Bind(&leaf_call); |
233 // Call native function or redirection via simulator. | 252 // Call native function or redirection via simulator. |
234 __ blr(R5); | 253 __ blr(R5); |
235 | 254 |
236 __ Bind(&done); | 255 __ Bind(&done); |
| 256 // Restore SP and CSP. |
| 257 __ mov(SP, CSP); |
| 258 __ mov(CSP, R26); |
237 | 259 |
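For reference, here is the dispatch the leaf/wrapper split above performs, rendered as plain C++ following the CL's own comments (TestImmediate against AutoSetupScopeMask, branch to the leaf path on EQ). The type and function names below are illustrative stand-ins, not VM declarations:

    #include <cstdint>

    using ArgsPtr  = void*;              // R0: pointer to the NativeArguments block
    using NativeFn = void (*)(ArgsPtr);  // R5: the native function's entry point

    // Stand-in for NativeEntry::NativeCallWrapper, which sets up a Dart API
    // scope before invoking the native function.
    static void NativeCallWrapperSketch(ArgsPtr args, NativeFn fn) { fn(args); }

    static void CallNative(ArgsPtr args, NativeFn fn,
                           uint64_t argc_tag, uint64_t auto_setup_scope_mask) {
      if ((argc_tag & auto_setup_scope_mask) == 0) {
        fn(args);                           // leaf call: branch straight to R5
      } else {
        NativeCallWrapperSketch(args, fn);  // non-leaf: go through the wrapper
      }
    }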
238 // Mark that the isolate is executing Dart code. | 260 // Mark that the isolate is executing Dart code. |
239 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); | 261 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); |
240 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); | 262 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); |
241 | 263 |
242 // Reset exit frame information in Isolate structure. | 264 // Reset exit frame information in Isolate structure. |
243 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); | 265 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); |
244 | 266 |
245 // Load Context pointer from Isolate structure into R2. | 267 // Load Context pointer from Isolate structure into R2. |
246 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); | 268 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); |
(...skipping 22 matching lines...)
269 const intptr_t argv_offset = NativeArguments::argv_offset(); | 291 const intptr_t argv_offset = NativeArguments::argv_offset(); |
270 const intptr_t retval_offset = NativeArguments::retval_offset(); | 292 const intptr_t retval_offset = NativeArguments::retval_offset(); |
271 | 293 |
272 __ EnterFrame(0); | 294 __ EnterFrame(0); |
273 | 295 |
274 // Load current Isolate pointer from Context structure into R0. | 296 // Load current Isolate pointer from Context structure into R0. |
275 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); | 297 __ LoadFieldFromOffset(R0, CTX, Context::isolate_offset(), kNoPP); |
276 | 298 |
277 // Save exit frame information to enable stack walking as we are about | 299 // Save exit frame information to enable stack walking as we are about |
278 // to transition to native code. | 300 // to transition to native code. |
279 __ mov(TMP, SP); // Can't store SP directly, first copy to TMP. | 301 __ StoreToOffset(SP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); |
280 __ StoreToOffset(TMP, R0, Isolate::top_exit_frame_info_offset(), kNoPP); | |
281 | 302 |
282 // Save current Context pointer into Isolate structure. | 303 // Save current Context pointer into Isolate structure. |
283 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); | 304 __ StoreToOffset(CTX, R0, Isolate::top_context_offset(), kNoPP); |
284 | 305 |
285 // Cache Isolate pointer into CTX while executing native code. | 306 // Cache Isolate pointer into CTX while executing native code. |
286 __ mov(CTX, R0); | 307 __ mov(CTX, R0); |
287 | 308 |
288 #if defined(DEBUG) | 309 #if defined(DEBUG) |
289 { Label ok; | 310 { Label ok; |
290 // Check that we are always entering from Dart code. | 311 // Check that we are always entering from Dart code. |
(...skipping 33 matching lines...)
324 | 345 |
325 // Passing the structure by value as in runtime calls would require changing | 346 // Passing the structure by value as in runtime calls would require changing |
326 // Dart API for native functions. | 347 // Dart API for native functions. |
327 // For now, space is reserved on the stack and we pass a pointer to it. | 348 // For now, space is reserved on the stack and we pass a pointer to it. |
328 __ StoreToOffset(R0, SP, isolate_offset, kNoPP); | 349 __ StoreToOffset(R0, SP, isolate_offset, kNoPP); |
329 __ StoreToOffset(R1, SP, argc_tag_offset, kNoPP); | 350 __ StoreToOffset(R1, SP, argc_tag_offset, kNoPP); |
330 __ StoreToOffset(R2, SP, argv_offset, kNoPP); | 351 __ StoreToOffset(R2, SP, argv_offset, kNoPP); |
331 __ StoreToOffset(R3, SP, retval_offset, kNoPP); | 352 __ StoreToOffset(R3, SP, retval_offset, kNoPP); |
332 __ mov(R0, SP); // Pass the pointer to the NativeArguments. | 353 __ mov(R0, SP); // Pass the pointer to the NativeArguments. |
333 | 354 |
| 355 // We are entering native code, so the C stack pointer must be restored from |
| 356 // the stack limit to the top of the stack. We cache the stack limit address |
| 357 // in a callee-saved register. |
| 358 __ mov(R26, CSP); |
| 359 __ mov(CSP, SP); |
| 360 |
334 // Call native function or redirection via simulator. | 361 // Call native function or redirection via simulator. |
335 __ blr(R5); | 362 __ blr(R5); |
336 | 363 |
| 364 // Restore SP and CSP. |
| 365 __ mov(SP, CSP); |
| 366 __ mov(CSP, R26); |
| 367 |
337 // Mark that the isolate is executing Dart code. | 368 // Mark that the isolate is executing Dart code. |
338 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); | 369 __ LoadImmediate(R2, VMTag::kScriptTagId, kNoPP); |
339 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); | 370 __ StoreToOffset(R2, CTX, Isolate::vm_tag_offset(), kNoPP); |
340 | 371 |
341 // Reset exit frame information in Isolate structure. | 372 // Reset exit frame information in Isolate structure. |
342 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); | 373 __ StoreToOffset(ZR, CTX, Isolate::top_exit_frame_info_offset(), kNoPP); |
343 | 374 |
344 // Load Context pointer from Isolate structure into R2. | 375 // Load Context pointer from Isolate structure into R2. |
345 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); | 376 __ LoadFromOffset(R2, CTX, Isolate::top_context_offset(), kNoPP); |
346 | 377 |
(...skipping 133 matching lines...)
480 | 511 |
481 // Push registers in their enumeration order: lowest register number at | 512 // Push registers in their enumeration order: lowest register number at |
482 // lowest address. | 513 // lowest address. |
483 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { | 514 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) { |
484 const Register r = static_cast<Register>(i); | 515 const Register r = static_cast<Register>(i); |
485 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); | 516 __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex)); |
486 } | 517 } |
487 | 518 |
488 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { | 519 for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) { |
489 VRegister vreg = static_cast<VRegister>(reg_idx); | 520 VRegister vreg = static_cast<VRegister>(reg_idx); |
490 // TODO(zra): Save whole V registers. For now, push twice. | 521 __ PushQuad(vreg); |
491 __ PushDouble(vreg); | |
492 __ PushDouble(vreg); | |
493 } | 522 } |
494 | 523 |
495 __ mov(R0, SP); // Pass address of saved registers block. | 524 __ mov(R0, SP); // Pass address of saved registers block. |
496 __ ReserveAlignedFrameSpace(0); | 525 __ ReserveAlignedFrameSpace(0); |
497 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1); | 526 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 1); |
498 // Result (R0) is stack-size (FP - SP) in bytes. | 527 // Result (R0) is stack-size (FP - SP) in bytes. |
499 | 528 |
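The block whose address is passed in R0 is laid out by the two push loops above: CPU registers are pushed first (so they end up at higher addresses, lowest register number at lowest address), then the V registers, now as full 128-bit quads after the PushQuad change. A sketch of the resulting offsets, assuming the ARM64 counts of 32 CPU and 32 V registers; the constants below are illustrative, not the VM's:

    #include <cstddef>

    constexpr size_t kVRegSlot   = 16;  // one PushQuad per V register
    constexpr size_t kCpuRegSlot = 8;   // one word per CPU register
    constexpr size_t kNumVRegs   = 32;

    // Offset of Vn within the block R0 points at (V0 sits at the lowest address).
    constexpr size_t VRegOffset(size_t n)   { return n * kVRegSlot; }
    // CPU registers follow the 32 quads, again lowest register at lowest address.
    constexpr size_t CpuRegOffset(size_t n) { return kNumVRegs * kVRegSlot + n * kCpuRegSlot; }

    static_assert(CpuRegOffset(0) == 512, "R0's slot starts right after V31's quad");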
500 if (preserve_result) { | 529 if (preserve_result) { |
501 // Restore result into R1 temporarily. | 530 // Restore result into R1 temporarily. |
502 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize, kNoPP); | 531 __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize, kNoPP); |
503 } | 532 } |
504 | 533 |
505 // There is a Dart Frame on the stack. We must restore PP and leave frame. | 534 // There is a Dart Frame on the stack. We must restore PP and leave frame. |
506 __ LeaveDartFrame(); | 535 __ LeaveDartFrame(); |
507 __ sub(TMP, FP, Operand(R0)); | 536 __ sub(SP, FP, Operand(R0)); |
508 __ mov(SP, TMP); | |
509 | 537 |
510 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 538 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
511 // is no need to set the correct PC marker or load PP, since they get patched. | 539 // is no need to set the correct PC marker or load PP, since they get patched. |
512 __ EnterFrame(0); | 540 __ EnterFrame(0); |
513 __ Push(ZR); | 541 __ Push(ZR); |
514 __ TagAndPushPP(); | 542 __ TagAndPushPP(); |
515 | 543 |
516 if (preserve_result) { | 544 if (preserve_result) { |
517 __ Push(R1); // Preserve result as first local. | 545 __ Push(R1); // Preserve result as first local. |
518 } | 546 } |
(...skipping 21 matching lines...)
540 // Result tells stub how many bytes to remove from the expression stack | 568 // Result tells stub how many bytes to remove from the expression stack |
541 // of the bottom-most frame. They were used as materialization arguments. | 569 // of the bottom-most frame. They were used as materialization arguments. |
542 __ Pop(R1); | 570 __ Pop(R1); |
543 __ SmiUntag(R1); | 571 __ SmiUntag(R1); |
544 if (preserve_result) { | 572 if (preserve_result) { |
545 __ Pop(R0); // Restore result. | 573 __ Pop(R0); // Restore result. |
546 __ Drop(1); // Workaround for dropped stack slot during GC. | 574 __ Drop(1); // Workaround for dropped stack slot during GC. |
547 } | 575 } |
548 __ LeaveStubFrame(); | 576 __ LeaveStubFrame(); |
549 // Remove materialization arguments. | 577 // Remove materialization arguments. |
550 __ add(TMP, SP, Operand(R1, UXTX, 0)); | 578 __ add(SP, SP, Operand(R1)); |
551 __ mov(SP, TMP); | |
552 __ ret(); | 579 __ ret(); |
553 } | 580 } |
554 | 581 |
555 | 582 |
556 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 583 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { |
557 // Correct return address to point just after the call that is being | 584 // Correct return address to point just after the call that is being |
558 // deoptimized. | 585 // deoptimized. |
559 __ AddImmediate(LR, LR, -CallPattern::kLengthInBytes, kNoPP); | 586 __ AddImmediate(LR, LR, -CallPattern::kLengthInBytes, kNoPP); |
560 GenerateDeoptimizationSequence(assembler, true); // Preserve R0. | 587 GenerateDeoptimizationSequence(assembler, true); // Preserve R0. |
561 } | 588 } |
(...skipping 179 matching lines...)
741 | 768 |
742 // Called when invoking Dart code from C++ (VM code). | 769 // Called when invoking Dart code from C++ (VM code). |
743 // Input parameters: | 770 // Input parameters: |
744 // LR : points to return address. | 771 // LR : points to return address. |
745 // R0 : entrypoint of the Dart function to call. | 772 // R0 : entrypoint of the Dart function to call. |
746 // R1 : arguments descriptor array. | 773 // R1 : arguments descriptor array. |
747 // R2 : arguments array. | 774 // R2 : arguments array. |
748 // R3 : new context containing the current isolate pointer. | 775 // R3 : new context containing the current isolate pointer. |
749 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { | 776 void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) { |
750 __ Comment("InvokeDartCodeStub"); | 777 __ Comment("InvokeDartCodeStub"); |
| 778 // Copy the C stack pointer (R31) into the stack pointer we'll actually use |
| 779 // to access the stack. |
| 780 __ mov(SP, CSP); |
751 __ EnterFrame(0); | 781 __ EnterFrame(0); |
752 | 782 |
753 // The new context, saved vm tag, the top exit frame, and the old context. | 783 // The new context, saved vm tag, the top exit frame, and the old context. |
754 const intptr_t kNewContextOffsetFromFp = | 784 const intptr_t kNewContextOffsetFromFp = |
755 -(1 + kAbiPreservedCpuRegCount + kAbiPreservedFpuRegCount) * kWordSize; | 785 -(1 + kAbiPreservedCpuRegCount + kAbiPreservedFpuRegCount) * kWordSize; |
756 | 786 |
757 // Save the callee-saved registers. | 787 // Save the callee-saved registers. |
758 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { | 788 for (int i = kAbiFirstPreservedCpuReg; i <= kAbiLastPreservedCpuReg; i++) { |
759 const Register r = static_cast<Register>(i); | 789 const Register r = static_cast<Register>(i); |
760 // We use str instead of the Push macro because we will be pushing the PP | 790 // We use str instead of the Push macro because we will be pushing the PP |
(...skipping 18 matching lines...)
779 | 809 |
780 // The new Context structure contains a pointer to the current Isolate | 810 // The new Context structure contains a pointer to the current Isolate |
781 // structure. Cache the Context pointer in the CTX register so that it is | 811 // structure. Cache the Context pointer in the CTX register so that it is |
782 // available in generated code and calls to Isolate::Current() need not be | 812 // available in generated code and calls to Isolate::Current() need not be |
783 // done. The assumption is that this register will never be clobbered by | 813 // done. The assumption is that this register will never be clobbered by |
784 // compiled or runtime stub code. | 814 // compiled or runtime stub code. |
785 | 815 |
786 // Cache the new Context pointer into CTX while executing Dart code. | 816 // Cache the new Context pointer into CTX while executing Dart code. |
787 __ LoadFromOffset(CTX, R3, VMHandles::kOffsetOfRawPtrInHandle, PP); | 817 __ LoadFromOffset(CTX, R3, VMHandles::kOffsetOfRawPtrInHandle, PP); |
788 | 818 |
789 // Load Isolate pointer from Context structure into temporary register R4. | 819 // Load Isolate pointer from Context structure into temporary register R5. |
790 __ LoadFieldFromOffset(R5, CTX, Context::isolate_offset(), PP); | 820 __ LoadFieldFromOffset(R5, CTX, Context::isolate_offset(), PP); |
791 | 821 |
| 822 // Load the stack limit address into the C stack pointer register. |
| 823 __ LoadFromOffset(CSP, R5, Isolate::stack_limit_offset(), PP); |
| 824 |
| 825 // Cache the new Context pointer into CTX while executing Dart code. |
| 826 __ LoadFromOffset(CTX, R3, VMHandles::kOffsetOfRawPtrInHandle, PP); |
| 827 |
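This entry sequence is the other half of the CSP discipline used in the call stubs above: on the way into Dart code the stub adopts CSP's value as the working SP, then parks the isolate's stack limit in CSP; on the way out (see the epilogue below) it copies SP back into CSP before returning to C++. A sketch of the bracketing in this file's assembler-macro style; EnterDartStack and LeaveDartStack are illustrative names, not VM APIs:

    // Illustrative only; InvokeDartCodeStub inlines these steps.
    static void EnterDartStack(Assembler* assembler, Register isolate) {
      __ mov(SP, CSP);  // Dart code addresses the stack through SP from here on.
      // ... frame setup and callee-saved spills happen via SP ...
      __ LoadFromOffset(CSP, isolate, Isolate::stack_limit_offset(), PP);
    }

    static void LeaveDartStack(Assembler* assembler) {
      // ... callee-saved restores and LeaveFrame() ...
      __ mov(CSP, SP);  // Hand a valid top of stack back to the C++ caller.
    }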
792 // Save the current VMTag on the stack. | 828 // Save the current VMTag on the stack. |
793 ASSERT(kSavedVMTagSlotFromEntryFp == -20); | 829 ASSERT(kSavedVMTagSlotFromEntryFp == -20); |
794 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset(), PP); | 830 __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset(), PP); |
795 __ Push(R4); | 831 __ Push(R4); |
796 | 832 |
797 // Mark that the isolate is executing Dart code. | 833 // Mark that the isolate is executing Dart code. |
798 __ LoadImmediate(R6, VMTag::kScriptTagId, PP); | 834 __ LoadImmediate(R6, VMTag::kScriptTagId, PP); |
799 __ StoreToOffset(R6, R5, Isolate::vm_tag_offset(), PP); | 835 __ StoreToOffset(R6, R5, Isolate::vm_tag_offset(), PP); |
800 | 836 |
801 // Save the top exit frame info. Use R6 as a temporary register. | 837 // Save the top exit frame info. Use R6 as a temporary register. |
(...skipping 75 matching lines...)
877 for (int i = kAbiLastPreservedFpuReg; i >= kAbiFirstPreservedFpuReg; i--) { | 913 for (int i = kAbiLastPreservedFpuReg; i >= kAbiFirstPreservedFpuReg; i--) { |
878 const VRegister r = static_cast<VRegister>(i); | 914 const VRegister r = static_cast<VRegister>(i); |
879 __ PopDouble(r); | 915 __ PopDouble(r); |
880 } | 916 } |
881 | 917 |
882 // Restore C++ ABI callee-saved registers. | 918 // Restore C++ ABI callee-saved registers. |
883 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { | 919 for (int i = kAbiLastPreservedCpuReg; i >= kAbiFirstPreservedCpuReg; i--) { |
884 Register r = static_cast<Register>(i); | 920 Register r = static_cast<Register>(i); |
885 // We use ldr instead of the Pop macro because we will be popping the PP | 921 // We use ldr instead of the Pop macro because we will be popping the PP |
886 // register when it is not holding a pool-pointer since we are returning to | 922 // register when it is not holding a pool-pointer since we are returning to |
887 // C++ code. | 923 // C++ code. We also skip the Dart stack pointer SP, since we are still |
| 924 // using it as the stack pointer. |
888 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); | 925 __ ldr(r, Address(SP, 1 * kWordSize, Address::PostIndex)); |
889 } | 926 } |
890 | 927 |
891 // Restore the frame pointer and return. | 928 // Restore the frame pointer and C stack pointer and return. |
892 __ LeaveFrame(); | 929 __ LeaveFrame(); |
| 930 __ mov(CSP, SP); |
893 __ ret(); | 931 __ ret(); |
894 } | 932 } |
895 | 933 |
896 | 934 |
897 // Called for inline allocation of contexts. | 935 // Called for inline allocation of contexts. |
898 // Input: | 936 // Input: |
899 // R1: number of context variables. | 937 // R1: number of context variables. |
900 // Output: | 938 // Output: |
901 // R0: newly allocated RawContext object. | 939 // R0: newly allocated RawContext object. |
902 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { | 940 void StubCode::GenerateAllocateContextStub(Assembler* assembler) { |
(...skipping 876 matching lines...)
1779 // TOS + 1: instantiator type arguments. | 1817 // TOS + 1: instantiator type arguments. |
1780 // TOS + 2: instance. | 1818 // TOS + 2: instance. |
1781 // TOS + 3: cache array. | 1819 // TOS + 3: cache array. |
1782 // Result in R1: null -> not found, otherwise result (true or false). | 1820 // Result in R1: null -> not found, otherwise result (true or false). |
1783 void StubCode::GenerateSubtype3TestCacheStub(Assembler* assembler) { | 1821 void StubCode::GenerateSubtype3TestCacheStub(Assembler* assembler) { |
1784 GenerateSubtypeNTestCacheStub(assembler, 3); | 1822 GenerateSubtypeNTestCacheStub(assembler, 3); |
1785 } | 1823 } |
1786 | 1824 |
1787 | 1825 |
1788 void StubCode::GenerateGetStackPointerStub(Assembler* assembler) { | 1826 void StubCode::GenerateGetStackPointerStub(Assembler* assembler) { |
1789 __ Stop("GenerateGetStackPointerStub"); | 1827 __ mov(R0, SP); |
| 1828 __ ret(); |
1790 } | 1829 } |
1791 | 1830 |
1792 | 1831 |
1793 void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) { | 1832 void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) { |
1794 __ Stop("GenerateJumpToExceptionHandlerStub"); | 1833 __ Stop("GenerateJumpToExceptionHandlerStub"); |
1795 } | 1834 } |
1796 | 1835 |
1797 | 1836 |
1798 // Calls to the runtime to optimize the given function. | 1837 // Calls to the runtime to optimize the given function. |
1799 // R6: function to be re-optimized. | 1838 // R6: function to be re-optimized. |
(...skipping 124 matching lines...)
1924 const Register right = R0; | 1963 const Register right = R0; |
1925 __ LoadFromOffset(left, SP, 1 * kWordSize, kNoPP); | 1964 __ LoadFromOffset(left, SP, 1 * kWordSize, kNoPP); |
1926 __ LoadFromOffset(right, SP, 0 * kWordSize, kNoPP); | 1965 __ LoadFromOffset(right, SP, 0 * kWordSize, kNoPP); |
1927 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp); | 1966 GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp); |
1928 __ ret(); | 1967 __ ret(); |
1929 } | 1968 } |
1930 | 1969 |
1931 } // namespace dart | 1970 } // namespace dart |
1932 | 1971 |
1933 #endif // defined TARGET_ARCH_ARM64 | 1972 #endif // defined TARGET_ARCH_ARM64 |