OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 292 matching lines...)
303 // .... | 303 // .... |
304 __ CallRuntime(kInstanceFunctionLookupRuntimeEntry); | 304 __ CallRuntime(kInstanceFunctionLookupRuntimeEntry); |
305 // Remove arguments. | 305 // Remove arguments. |
306 __ Drop(4); | 306 __ Drop(4); |
307 __ Pop(R0); // Get result into R0. | 307 __ Pop(R0); // Get result into R0. |
308 __ LeaveStubFrame(); | 308 __ LeaveStubFrame(); |
309 __ Ret(); | 309 __ Ret(); |
310 } | 310 } |
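The epilogue above is the tail of the usual stub runtime-call pattern seen throughout this file: reserve a slot for the result, push the arguments, call the runtime entry, drop the arguments, and pop the result into R0. As a minimal, standalone illustration of that protocol (a sketch only: ExampleRuntimeEntry, the argument values, and the vector-based stack are made-up stand-ins, not VM code):

#include <cstdint>
#include <iostream>
#include <vector>

using Stack = std::vector<std::intptr_t>;

// Stand-in for a runtime entry: it writes its result into the slot the
// caller reserved just below the arguments.
void ExampleRuntimeEntry(Stack* stack, int num_args) {
  const auto result_slot = stack->size() - num_args - 1;
  (*stack)[result_slot] = 42;  // Hypothetical lookup result.
}

int main() {
  Stack stack;
  stack.push_back(0);    // Reserve room for the result (the pushed null).
  stack.push_back(111);  // Argument 1 (made-up value).
  stack.push_back(222);  // Argument 2 (made-up value).
  ExampleRuntimeEntry(&stack, /*num_args=*/2);
  stack.resize(stack.size() - 2);   // __ Drop(2): remove the arguments.
  std::intptr_t r0 = stack.back();  // __ Pop(R0): fetch the result.
  stack.pop_back();
  std::cout << "result in R0: " << r0 << "\n";  // Prints 42.
  return 0;
}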
311 | 311 |
312 | 312 |
| 313 DECLARE_LEAF_RUNTIME_ENTRY(intptr_t, DeoptimizeCopyFrame, |
| 314 intptr_t deopt_reason, |
| 315 uword saved_registers_address); |
| 316 |
| 317 DECLARE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp); |
| 318 |
| 319 |
| 320 // Used by eager and lazy deoptimization. Preserve result in R0 if necessary. |
 | 321 // This stub translates an optimized frame into an unoptimized frame. The |
 | 322 // optimized frame can contain values in registers and on the stack; the |
 | 323 // unoptimized frame contains all values on the stack. |
 | 324 // Deoptimization occurs in the following steps: |
| 325 // - Push all registers that can contain values. |
| 326 // - Call C routine to copy the stack and saved registers into temporary buffer. |
| 327 // - Adjust caller's frame to correct unoptimized frame size. |
| 328 // - Fill the unoptimized frame. |
| 329 // - Materialize objects that require allocation (e.g. Double instances). |
 | 330 // GC can occur only after the frame is fully rewritten. |
| 331 // Stack after EnterFrame(...) below: |
| 332 // +------------------+ |
| 333 // | Saved FP | <- TOS |
| 334 // +------------------+ |
| 335 // | return-address | (deoptimization point) |
| 336 // +------------------+ |
| 337 // | optimized frame | |
| 338 // | ... | |
| 339 // |
 | 340 // Parts of the code cannot cause GC; other parts can. |
| 341 static void GenerateDeoptimizationSequence(Assembler* assembler, |
| 342 bool preserve_result) { |
| 343 __ EnterFrame((1 << FP) | (1 << LR), 0); |
| 344 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
| 345 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
| 346 const intptr_t saved_r0_offset_from_fp = -(kNumberOfCpuRegisters - R0); |
| 347 // Result in R0 is preserved as part of pushing all registers below. |
| 348 |
| 349 // Push registers in their enumeration order: lowest register number at |
| 350 // lowest address. |
| 351 __ PushList(kAllCpuRegistersList); |
| 352 ASSERT(kFpuRegisterSize == 2 * kWordSize); |
| 353 __ vstmd(DB_W, SP, D0, static_cast<DRegister>(kNumberOfDRegisters - 1)); |
| 354 |
| 355 __ mov(R0, ShifterOperand(SP)); // Pass address of saved registers block. |
| 356 __ ReserveAlignedFrameSpace(0); |
| 357 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry); |
| 358 // Result (R0) is stack-size (FP - SP) in bytes, incl. the return address. |
| 359 |
| 360 if (preserve_result) { |
| 361 // Restore result into R1 temporarily. |
| 362 __ ldr(R1, Address(FP, saved_r0_offset_from_fp * kWordSize)); |
| 363 } |
| 364 |
| 365 __ LeaveFrame((1 << FP) | (1 << LR)); |
| 366 __ sub(SP, FP, ShifterOperand(R0)); |
| 367 |
| 368 __ EnterFrame((1 << FP) | (1 << LR), 0); |
| 369 __ mov(R0, ShifterOperand(SP)); // Get last FP address. |
| 370 if (preserve_result) { |
| 371 __ Push(R1); // Preserve result. |
| 372 } |
| 373 __ ReserveAlignedFrameSpace(0); |
| 374 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry); // Pass last FP in R0. |
| 375 // Result (R0) is our FP. |
| 376 if (preserve_result) { |
| 377 // Restore result into R1. |
| 378 __ ldr(R1, Address(FP, -1 * kWordSize)); |
| 379 } |
| 380 // Code above cannot cause GC. |
| 381 __ LeaveFrame((1 << FP) | (1 << LR)); |
| 382 __ mov(FP, ShifterOperand(R0)); |
| 383 |
| 384 // Frame is fully rewritten at this point and it is safe to perform a GC. |
| 385 // Materialize any objects that were deferred by FillFrame because they |
| 386 // require allocation. |
| 387 __ EnterStubFrame(); |
| 388 if (preserve_result) { |
 | 389 __ Push(R1); // Preserve result; it may be visited by the GC here. |
| 390 } |
| 391 __ CallRuntime(kDeoptimizeMaterializeDoublesRuntimeEntry); |
| 392 if (preserve_result) { |
| 393 __ Pop(R0); // Restore result. |
| 394 } |
| 395 __ LeaveStubFrame(); |
| 396 __ Ret(); |
| 397 } |
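For orientation, the saved_r0_offset_from_fp constant above relies on the layout produced by EnterFrame and PushList: FP is left pointing at the saved FP, and PushList(kAllCpuRegistersList) then stores register N at FP - (kNumberOfCpuRegisters - N) words. The standalone sketch below just prints that layout; kWordSize and kNumberOfDRegisters are not used, and the two constants it defines are restated here as assumptions rather than taken from the VM headers:

#include <cstdio>

int main() {
  const int kWordSize = 4;               // ARM32 word size (assumption).
  const int kNumberOfCpuRegisters = 16;  // R0..R15 (assumption).

  // PushList stores the lowest-numbered register at the lowest address,
  // directly below the saved FP that EnterFrame set FP to point at.
  for (int reg = 0; reg < kNumberOfCpuRegisters; ++reg) {
    const int offset_in_words = -(kNumberOfCpuRegisters - reg);
    std::printf("R%-2d saved at FP%+d words (FP%+d bytes)\n",
                reg, offset_in_words, offset_in_words * kWordSize);
  }
  // For R0 this is FP - 16 words, matching
  //   saved_r0_offset_from_fp = -(kNumberOfCpuRegisters - R0).
  return 0;
}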
| 398 |
| 399 |
313 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 400 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { |
314 __ Unimplemented("DeoptimizeLazy stub"); | 401 __ Unimplemented("DeoptimizeLazy stub"); |
315 } | 402 } |
316 | 403 |
317 | 404 |
318 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 405 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
319 __ Unimplemented("Deoptimize stub"); | 406 GenerateDeoptimizationSequence(assembler, false); // Don't preserve R0. |
320 } | 407 } |
321 | 408 |
322 | 409 |
323 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 410 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
324 __ Unimplemented("MegamorphicMiss stub"); | 411 __ Unimplemented("MegamorphicMiss stub"); |
325 } | 412 } |
326 | 413 |
327 | 414 |
328 // Called for inline allocation of arrays. | 415 // Called for inline allocation of arrays. |
329 // Input parameters: | 416 // Input parameters: |
(...skipping 814 matching lines...)
1144 __ LeaveStubFrame(true); | 1231 __ LeaveStubFrame(true); |
1145 __ Ret(); | 1232 __ Ret(); |
1146 } | 1233 } |
1147 | 1234 |
1148 | 1235 |
1149 void StubCode::GenerateCallNoSuchMethodFunctionStub(Assembler* assembler) { | 1236 void StubCode::GenerateCallNoSuchMethodFunctionStub(Assembler* assembler) { |
1150 __ Unimplemented("CallNoSuchMethodFunction stub"); | 1237 __ Unimplemented("CallNoSuchMethodFunction stub"); |
1151 } | 1238 } |
1152 | 1239 |
1153 | 1240 |
| 1241 // R6: function object. |
| 1242 // R5: inline cache data object. |
| 1243 // R4: arguments descriptor array. |
1154 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { | 1244 void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) { |
1155 __ Unimplemented("OptimizedUsageCounterIncrement stub"); | 1245 Register ic_reg = R5; |
| 1246 Register func_reg = R6; |
| 1247 if (FLAG_trace_optimized_ic_calls) { |
| 1248 __ EnterStubFrame(); |
| 1249 __ PushList((1 << R4) | (1 << R5) | (1 << R6)); // Preserve. |
| 1250 __ Push(ic_reg); // Argument. |
| 1251 __ Push(func_reg); // Argument. |
| 1252 __ CallRuntime(kTraceICCallRuntimeEntry); |
 | 1253 __ Drop(2); // Discard arguments. |
 | 1254 __ PopList((1 << R4) | (1 << R5) | (1 << R6)); // Restore. |
| 1255 __ LeaveStubFrame(); |
| 1256 } |
| 1257 __ ldr(R7, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1258 Label is_hot; |
| 1259 if (FlowGraphCompiler::CanOptimize()) { |
| 1260 ASSERT(FLAG_optimization_counter_threshold > 1); |
| 1261 __ CompareImmediate(R7, FLAG_optimization_counter_threshold); |
| 1262 __ b(&is_hot, GE); |
 | 1263 // As long as the VM has no OSR, do not optimize in the middle of the |
 | 1264 // function but only at exit, so that all type feedback has been collected |
 | 1265 // before optimizing. |
| 1266 } |
| 1267 __ add(R7, R7, ShifterOperand(1)); |
| 1268 __ str(R7, FieldAddress(func_reg, Function::usage_counter_offset())); |
| 1269 __ Bind(&is_hot); |
1156 } | 1270 } |
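As a reading aid, the counter update emitted above corresponds roughly to the following standalone C++ sketch; FakeFunction, kOptimizationCounterThreshold, and kCanOptimize are illustrative stand-ins for Function::usage_counter, FLAG_optimization_counter_threshold, and FlowGraphCompiler::CanOptimize(), not VM definitions:

#include <cstdio>

// Rough, standalone restatement of the usage-counter update.
struct FakeFunction {
  int usage_counter;
};

const int kOptimizationCounterThreshold = 3000;  // Assumed flag value.
const bool kCanOptimize = true;                  // CanOptimize() stand-in.

void UpdateUsageCounter(FakeFunction* function) {
  int counter = function->usage_counter;  // ldr R7, [func_reg + offset]
  if (kCanOptimize && counter >= kOptimizationCounterThreshold) {
    return;  // b &is_hot, GE: already hot, leave the counter as is.
  }
  function->usage_counter = counter + 1;  // add R7, R7, #1; str R7, ...
}

int main() {
  FakeFunction f{kOptimizationCounterThreshold - 1};
  UpdateUsageCounter(&f);  // Increments to the threshold.
  UpdateUsageCounter(&f);  // Now hot: the counter stays at the threshold.
  std::printf("usage_counter = %d\n", f.usage_counter);
  return 0;
}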
1157 | 1271 |
1158 | 1272 |
1159 // Loads function into 'temp_reg'. | 1273 // Loads function into 'temp_reg'. |
1160 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, | 1274 void StubCode::GenerateUsageCounterIncrement(Assembler* assembler, |
1161 Register temp_reg) { | 1275 Register temp_reg) { |
1162 Register ic_reg = R5; | 1276 Register ic_reg = R5; |
1163 Register func_reg = temp_reg; | 1277 Register func_reg = temp_reg; |
1164 ASSERT(temp_reg == R6); | 1278 ASSERT(temp_reg == R6); |
1165 __ ldr(func_reg, FieldAddress(ic_reg, ICData::function_offset())); | 1279 __ ldr(func_reg, FieldAddress(ic_reg, ICData::function_offset())); |
(...skipping 232 matching lines...)
1398 GenerateNArgsCheckInlineCacheStub(assembler, 1); | 1512 GenerateNArgsCheckInlineCacheStub(assembler, 1); |
1399 } | 1513 } |
1400 | 1514 |
1401 | 1515 |
1402 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { | 1516 void StubCode::GenerateMegamorphicCallStub(Assembler* assembler) { |
1403 GenerateNArgsCheckInlineCacheStub(assembler, 1); | 1517 GenerateNArgsCheckInlineCacheStub(assembler, 1); |
1404 } | 1518 } |
1405 | 1519 |
1406 | 1520 |
1407 // LR: return address (Dart code). | 1521 // LR: return address (Dart code). |
1408 // R4: Arguments descriptor array. | 1522 // R4: arguments descriptor array. |
1409 void StubCode::GenerateBreakpointStaticStub(Assembler* assembler) { | 1523 void StubCode::GenerateBreakpointStaticStub(Assembler* assembler) { |
1410 // Create a stub frame as we are pushing some objects on the stack before | 1524 // Create a stub frame as we are pushing some objects on the stack before |
1411 // calling into the runtime. | 1525 // calling into the runtime. |
1412 __ EnterStubFrame(); | 1526 __ EnterStubFrame(); |
1413 __ LoadImmediate(R0, reinterpret_cast<intptr_t>(Object::null())); | 1527 __ LoadImmediate(R0, reinterpret_cast<intptr_t>(Object::null())); |
1414 // // Preserve arguments descriptor and make room for result. | 1528 // // Preserve arguments descriptor and make room for result. |
1415 __ PushList((1 << R0) | (1 << R4)); | 1529 __ PushList((1 << R0) | (1 << R4)); |
1416 __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry); | 1530 __ CallRuntime(kBreakpointStaticHandlerRuntimeEntry); |
1417 // Pop code object result and restore arguments descriptor. | 1531 // Pop code object result and restore arguments descriptor. |
1418 __ PopList((1 << R0) | (1 << R4)); | 1532 __ PopList((1 << R0) | (1 << R4)); |
(...skipping 18 matching lines...)
1437 __ LeaveStubFrame(); | 1551 __ LeaveStubFrame(); |
1438 | 1552 |
1439 // Instead of returning to the patched Dart function, emulate the | 1553 // Instead of returning to the patched Dart function, emulate the |
1440 // smashed return code pattern and return to the function's caller. | 1554 // smashed return code pattern and return to the function's caller. |
1441 __ LeaveDartFrame(); | 1555 __ LeaveDartFrame(); |
1442 __ Ret(); | 1556 __ Ret(); |
1443 } | 1557 } |
1444 | 1558 |
1445 | 1559 |
1446 // LR: return address (Dart code). | 1560 // LR: return address (Dart code). |
1447 // R5: Inline cache data array. | 1561 // R5: inline cache data array. |
1448 // R4: Arguments descriptor array. | 1562 // R4: arguments descriptor array. |
1449 void StubCode::GenerateBreakpointDynamicStub(Assembler* assembler) { | 1563 void StubCode::GenerateBreakpointDynamicStub(Assembler* assembler) { |
1450 // Create a stub frame as we are pushing some objects on the stack before | 1564 // Create a stub frame as we are pushing some objects on the stack before |
1451 // calling into the runtime. | 1565 // calling into the runtime. |
1452 __ EnterStubFrame(); | 1566 __ EnterStubFrame(); |
1453 __ PushList((1 << R4) | (1 << R5)); | 1567 __ PushList((1 << R4) | (1 << R5)); |
1454 __ CallRuntime(kBreakpointDynamicHandlerRuntimeEntry); | 1568 __ CallRuntime(kBreakpointDynamicHandlerRuntimeEntry); |
1455 __ PopList((1 << R4) | (1 << R5)); | 1569 __ PopList((1 << R4) | (1 << R5)); |
1456 __ LeaveStubFrame(); | 1570 __ LeaveStubFrame(); |
1457 | 1571 |
1458 // Find out which dispatch stub to call. | 1572 // Find out which dispatch stub to call. |
(...skipping 329 matching lines...)
1788 __ Bind(&reference_compare); | 1902 __ Bind(&reference_compare); |
1789 __ cmp(left, ShifterOperand(right)); | 1903 __ cmp(left, ShifterOperand(right)); |
1790 __ Bind(&done); | 1904 __ Bind(&done); |
1791 __ PopList((1 << R0) | (1 << R1) | (1 << R2)); | 1905 __ PopList((1 << R0) | (1 << R1) | (1 << R2)); |
1792 __ Ret(); | 1906 __ Ret(); |
1793 } | 1907 } |
1794 | 1908 |
1795 } // namespace dart | 1909 } // namespace dart |
1796 | 1910 |
1797 #endif // defined TARGET_ARCH_ARM | 1911 #endif // defined TARGET_ARCH_ARM |