OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 434 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Emits the common deoptimization sequence shared by the eager,
// lazy-from-return, and lazy-from-throw deopt stubs.  The sequence:
//   1. saves the full CPU + vector register state to the stack,
//   2. calls DeoptimizeCopyFrame to compute the unoptimized frame size,
//   3. rewrites the frame via DeoptimizeFillFrame,
//   4. materializes any deferred objects, then returns into the
//      unoptimized code.
// For lazy deopts the live values (result, or exception + stacktrace)
// are threaded through each phase so they survive the frame rewriting.
static void GenerateDeoptimizationSequence(Assembler* assembler,
                                           DeoptStubKind kind) {
  // DeoptimizeCopyFrame expects a Dart frame, i.e. EnterDartFrame(0), but there
  // is no need to set the correct PC marker or load PP, since they get patched.
  __ EnterStubFrame();

  // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry
  // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls.
  //
  // FP-relative slots where the pushed registers of interest will land after
  // the register-save loop below.  The result and the exception both live in
  // R0 (they are mutually exclusive per deopt kind); the stacktrace lives in
  // R1.
  const intptr_t saved_result_slot_from_fp =
      kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0);
  const intptr_t saved_exception_slot_from_fp =
      kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0);
  const intptr_t saved_stacktrace_slot_from_fp =
      kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R1);
  // Result in R0 is preserved as part of pushing all registers below.

  // Push registers in their enumeration order: lowest register number at
  // lowest address.
  for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) {
    const Register r = static_cast<Register>(i);
    if (r == CODE_REG) {
      // Save the original value of CODE_REG pushed before invoking this stub
      // instead of the value used to call this stub.
      // R25 is used as the scratch register here; it must enumerate above
      // CODE_REG so that its own (already pushed) slot is not corrupted.
      COMPILE_ASSERT(R25 > CODE_REG);
      __ ldr(R25, Address(FP, 2 * kWordSize));
      __ str(R25, Address(SP, -1 * kWordSize, Address::PreIndex));
    } else {
      __ str(r, Address(SP, -1 * kWordSize, Address::PreIndex));
    }
  }

  // Push all vector registers (full 128-bit quads) below the CPU registers.
  for (intptr_t reg_idx = kNumberOfVRegisters - 1; reg_idx >= 0; reg_idx--) {
    VRegister vreg = static_cast<VRegister>(reg_idx);
    __ PushQuad(vreg);
  }

  __ mov(R0, SP);  // Pass address of saved registers block.
  // Both lazy kinds are reported to the runtime as "lazy" (1); eager is 0.
  bool is_lazy = (kind == kLazyDeoptFromReturn) ||
                 (kind == kLazyDeoptFromThrow);
  __ LoadImmediate(R1, is_lazy ? 1 : 0);
  __ ReserveAlignedFrameSpace(0);
  __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2);
  // Result (R0) is stack-size (FP - SP) in bytes.

  if (kind == kLazyDeoptFromReturn) {
    // Restore result into R1 temporarily.
    __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * kWordSize);
  } else if (kind == kLazyDeoptFromThrow) {
    // Restore exception into R1 and stacktrace into R2 temporarily.
    __ LoadFromOffset(R1, FP, saved_exception_slot_from_fp * kWordSize);
    __ LoadFromOffset(R2, FP, saved_stacktrace_slot_from_fp * kWordSize);
  }

  // There is a Dart Frame on the stack. We must restore PP and leave frame.
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  // Set SP to the size DeoptimizeCopyFrame requested for the unoptimized
  // frame.
  __ sub(SP, FP, Operand(R0));

  // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there
  // is no need to set the correct PC marker or load PP, since they get patched.
  __ EnterStubFrame();

  if (kind == kLazyDeoptFromReturn) {
    __ Push(R1);  // Preserve result as first local.
  } else if (kind == kLazyDeoptFromThrow) {
    __ Push(R1);  // Preserve exception as first local.
    __ Push(R2);  // Preserve stacktrace as second local.
  }
  __ ReserveAlignedFrameSpace(0);
  __ mov(R0, FP);  // Pass last FP as parameter in R0.
  __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1);
  if (kind == kLazyDeoptFromReturn) {
    // Restore result into R1.
    __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize);
  } else if (kind == kLazyDeoptFromThrow) {
    // Restore exception into R1 and stacktrace into R2 from the locals
    // pushed above.
    __ LoadFromOffset(R1, FP, kFirstLocalSlotFromFp * kWordSize);
    __ LoadFromOffset(R2, FP, (kFirstLocalSlotFromFp - 1) * kWordSize);
  }
  // Code above cannot cause GC.
  // There is a Dart Frame on the stack. We must restore PP and leave frame.
  __ RestoreCodePointer();
  __ LeaveStubFrame();

  // Frame is fully rewritten at this point and it is safe to perform a GC.
  // Materialize any objects that were deferred by FillFrame because they
  // require allocation.
  // Enter stub frame with loading PP. The caller's PP is not materialized yet.
  __ EnterStubFrame();
  if (kind == kLazyDeoptFromReturn) {
    __ Push(R1);  // Preserve result, it will be GC-d here.
  } else if (kind == kLazyDeoptFromThrow) {
    __ Push(R1);  // Preserve exception, it will be GC-d here.
    __ Push(R2);  // Preserve stacktrace, it will be GC-d here.
  }

  __ Push(ZR);  // Space for the result.
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  // Result tells stub how many bytes to remove from the expression stack
  // of the bottom-most frame. They were used as materialization arguments.
  __ Pop(R1);
  __ SmiUntag(R1);
  if (kind == kLazyDeoptFromReturn) {
    __ Pop(R0);  // Restore result.
  } else if (kind == kLazyDeoptFromThrow) {
    // Pops mirror the push order above; exception ends up in R0 and
    // stacktrace in R1, which is where the throw machinery expects them.
    __ Pop(R1);  // Restore stacktrace.
    __ Pop(R0);  // Restore exception.
  }
  __ LeaveStubFrame();
  // Remove materialization arguments.
  __ add(SP, SP, Operand(R1));
  __ ret();
}
535 | 558 |
536 | 559 |
// Stub entered when a lazy deoptimization is triggered at a return site.
// On entry:
//   LR: return address + call-instruction-size
//   R0: result, must be preserved
void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) {
  // Correct return address to point just after the call that is being
  // deoptimized.
  __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes);
  // Push zap value instead of CODE_REG for lazy deopt.
  __ LoadImmediate(TMP, 0xf1f1f1f1);
  __ Push(TMP);
  GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn);
}
546 | 571 |
547 | 572 |
// Stub entered when a lazy deoptimization is triggered at a throw site.
// On entry:
//   LR: return address + call-instruction-size
//   R0: exception, must be preserved
//   R1: stacktrace, must be preserved
void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) {
  // Correct return address to point just after the call that is being
  // deoptimized.
  __ AddImmediate(LR, LR, -CallPattern::kDeoptCallLengthInBytes);
  // Push zap value instead of CODE_REG for lazy deopt.
  __ LoadImmediate(TMP, 0xf1f1f1f1);
  __ Push(TMP);
  GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow);
}
| 585 |
| 586 |
// Stub for eager deoptimization: no live result/exception values need to be
// preserved across the sequence.
void StubCode::GenerateDeoptimizeStub(Assembler* assembler) {
  GenerateDeoptimizationSequence(assembler, kEagerDeopt);
}
551 | 590 |
552 | 591 |
553 static void GenerateDispatcherCode(Assembler* assembler, | 592 static void GenerateDispatcherCode(Assembler* assembler, |
554 Label* call_target_function) { | 593 Label* call_target_function) { |
555 __ Comment("NoSuchMethodDispatch"); | 594 __ Comment("NoSuchMethodDispatch"); |
556 // When lazily generated invocation dispatchers are disabled, the | 595 // When lazily generated invocation dispatchers are disabled, the |
557 // miss-handler may return null. | 596 // miss-handler may return null. |
(...skipping 1717 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2275 } | 2314 } |
2276 | 2315 |
2277 | 2316 |
// Marker stub for frames awaiting materialization; it must never actually
// execute, so it traps immediately (brk #0).
void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
  __ brk(0);
}
2281 | 2320 |
2282 } // namespace dart | 2321 } // namespace dart |
2283 | 2322 |
2284 #endif // defined TARGET_ARCH_ARM64 | 2323 #endif // defined TARGET_ARCH_ARM64 |
OLD | NEW |