| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
| 10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
| (...skipping 409 matching lines...) |
| 420 COMPILE_ASSERT(PP < CODE_REG); | 420 COMPILE_ASSERT(PP < CODE_REG); |
| 421 COMPILE_ASSERT(CODE_REG < FP); | 421 COMPILE_ASSERT(CODE_REG < FP); |
| 422 COMPILE_ASSERT(FP < IP); | 422 COMPILE_ASSERT(FP < IP); |
| 423 __ EnterFrame((1 << PP) | (1 << CODE_REG) | (1 << FP) | (1 << IP), 0); | 423 __ EnterFrame((1 << PP) | (1 << CODE_REG) | (1 << FP) | (1 << IP), 0); |
| 424 __ LoadPoolPointer(); | 424 __ LoadPoolPointer(); |
| 425 | 425 |
| 426 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry | 426 // The code in this frame may not cause GC. kDeoptimizeCopyFrameRuntimeEntry |
| 427 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. | 427 // and kDeoptimizeFillFrameRuntimeEntry are leaf runtime calls. |
| 428 const intptr_t saved_result_slot_from_fp = | 428 const intptr_t saved_result_slot_from_fp = |
| 429 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); | 429 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); |
| | 430 const intptr_t saved_exception_slot_from_fp = |
| | 431 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R0); |
| | 432 const intptr_t saved_stacktrace_slot_from_fp = |
| | 433 kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - R1); |
| 430 // Result in R0 is preserved as part of pushing all registers below. | 434 // Result in R0 is preserved as part of pushing all registers below. |
| 431 | 435 |
| 432 // Push registers in their enumeration order: lowest register number at | 436 // Push registers in their enumeration order: lowest register number at |
| 433 // lowest address. | 437 // lowest address. |
| 434 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) { | 438 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) { |
| 435 if (i == CODE_REG) { | 439 if (i == CODE_REG) { |
| 436 // Save the original value of CODE_REG pushed before invoking this stub | 440 // Save the original value of CODE_REG pushed before invoking this stub |
| 437 // instead of the value used to call this stub. | 441 // instead of the value used to call this stub. |
| 438 __ ldr(IP, Address(FP, kCallerSpSlotFromFp * kWordSize)); | 442 __ ldr(IP, Address(FP, kCallerSpSlotFromFp * kWordSize)); |
| 439 __ Push(IP); | 443 __ Push(IP); |
| (...skipping 12 matching lines...) |
| 452 __ vstmd(DB_W, SP, D16, kNumberOfDRegisters - 16); | 456 __ vstmd(DB_W, SP, D16, kNumberOfDRegisters - 16); |
| 453 __ vstmd(DB_W, SP, D0, 16); | 457 __ vstmd(DB_W, SP, D0, 16); |
| 454 } else { | 458 } else { |
| 455 __ vstmd(DB_W, SP, D0, kNumberOfDRegisters); | 459 __ vstmd(DB_W, SP, D0, kNumberOfDRegisters); |
| 456 } | 460 } |
| 457 } else { | 461 } else { |
| 458 __ AddImmediate(SP, SP, -kNumberOfFpuRegisters * kFpuRegisterSize); | 462 __ AddImmediate(SP, SP, -kNumberOfFpuRegisters * kFpuRegisterSize); |
| 459 } | 463 } |
| 460 | 464 |
| 461 __ mov(R0, Operand(SP)); // Pass address of saved registers block. | 465 __ mov(R0, Operand(SP)); // Pass address of saved registers block. |
| 462 __ mov(R1, Operand(kind == kLazyDeopt ? 1 : 0)); | 466 bool is_lazy = (kind == kLazyDeoptFromReturn) || |
| | 467 (kind == kLazyDeoptFromThrow); |
| | 468 __ mov(R1, Operand(is_lazy ? 1 : 0)); |
| 463 __ ReserveAlignedFrameSpace(0); | 469 __ ReserveAlignedFrameSpace(0); |
| 464 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); | 470 __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2); |
| 465 // Result (R0) is stack-size (FP - SP) in bytes. | 471 // Result (R0) is stack-size (FP - SP) in bytes. |
| 466 | 472 |
| 467 const bool preserve_result = (kind == kLazyDeopt); | 473 if (kind == kLazyDeoptFromReturn) { |
| 468 if (preserve_result) { | |
| 469 // Restore result into R1 temporarily. | 474 // Restore result into R1 temporarily. |
| 470 __ ldr(R1, Address(FP, saved_result_slot_from_fp * kWordSize)); | 475 __ ldr(R1, Address(FP, saved_result_slot_from_fp * kWordSize)); |
| | 476 } else if (kind == kLazyDeoptFromThrow) { |
| | 477 // Restore exception into R1 and stacktrace into R2 temporarily. |
| | 478 __ ldr(R1, Address(FP, saved_exception_slot_from_fp * kWordSize)); |
| | 479 __ ldr(R2, Address(FP, saved_stacktrace_slot_from_fp * kWordSize)); |
| 471 } | 480 } |
| 472 | 481 |
| 473 __ RestoreCodePointer(); | 482 __ RestoreCodePointer(); |
| 474 __ LeaveDartFrame(); | 483 __ LeaveDartFrame(); |
| 475 __ sub(SP, FP, Operand(R0)); | 484 __ sub(SP, FP, Operand(R0)); |
| 476 | 485 |
| 477 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there | 486 // DeoptimizeFillFrame expects a Dart frame, i.e. EnterDartFrame(0), but there |
| 478 // is no need to set the correct PC marker or load PP, since they get patched. | 487 // is no need to set the correct PC marker or load PP, since they get patched. |
| 479 __ EnterStubFrame(); | 488 __ EnterStubFrame(); |
| 480 __ mov(R0, Operand(FP)); // Get last FP address. | 489 __ mov(R0, Operand(FP)); // Get last FP address. |
| 481 if (preserve_result) { | 490 if (kind == kLazyDeoptFromReturn) { |
| 482 __ Push(R1); // Preserve result as first local. | 491 __ Push(R1); // Preserve result as first local. |
| | 492 } else if (kind == kLazyDeoptFromThrow) { |
| | 493 __ Push(R1); // Preserve exception as first local. |
| | 494 __ Push(R2); // Preserve stacktrace as second local. |
| 483 } | 495 } |
| 484 __ ReserveAlignedFrameSpace(0); | 496 __ ReserveAlignedFrameSpace(0); |
| 485 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); // Pass last FP in R0. | 497 __ CallRuntime(kDeoptimizeFillFrameRuntimeEntry, 1); // Pass last FP in R0. |
| 486 if (preserve_result) { | 498 if (kind == kLazyDeoptFromReturn) { |
| 487 // Restore result into R1. | 499 // Restore result into R1. |
| 488 __ ldr(R1, Address(FP, kFirstLocalSlotFromFp * kWordSize)); | 500 __ ldr(R1, Address(FP, kFirstLocalSlotFromFp * kWordSize)); |
| | 501 } else if (kind == kLazyDeoptFromThrow) { |
| | 502 // Restore exception into R1 and stacktrace into R2. |
| | 503 __ ldr(R1, Address(FP, kFirstLocalSlotFromFp * kWordSize)); |
| | 504 __ ldr(R2, Address(FP, (kFirstLocalSlotFromFp - 1) * kWordSize)); |
| 489 } | 505 } |
| 490 // Code above cannot cause GC. | 506 // Code above cannot cause GC. |
| 491 __ RestoreCodePointer(); | 507 __ RestoreCodePointer(); |
| 492 __ LeaveStubFrame(); | 508 __ LeaveStubFrame(); |
| 493 | 509 |
| 494 // Frame is fully rewritten at this point and it is safe to perform a GC. | 510 // Frame is fully rewritten at this point and it is safe to perform a GC. |
| 495 // Materialize any objects that were deferred by FillFrame because they | 511 // Materialize any objects that were deferred by FillFrame because they |
| 496 // require allocation. | 512 // require allocation. |
| 497 // Enter stub frame with loading PP. The caller's PP is not materialized yet. | 513 // Enter stub frame with loading PP. The caller's PP is not materialized yet. |
| 498 __ EnterStubFrame(); | 514 __ EnterStubFrame(); |
| 499 if (preserve_result) { | 515 if (kind == kLazyDeoptFromReturn) { |
| 500 __ Push(R1); // Preserve result, it will be GC-d here. | 516 __ Push(R1); // Preserve result, it will be GC-d here. |
| | 517 } else if (kind == kLazyDeoptFromThrow) { |
| | 518 __ Push(R1); // Preserve exception, it will be GC-d here. |
| | 519 __ Push(R2); // Preserve stacktrace, it will be GC-d here. |
| 501 } | 520 } |
| 502 __ PushObject(Smi::ZoneHandle()); // Space for the result. | 521 __ PushObject(Smi::ZoneHandle()); // Space for the result. |
| 503 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); | 522 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0); |
| 504 // Result tells stub how many bytes to remove from the expression stack | 523 // Result tells stub how many bytes to remove from the expression stack |
| 505 // of the bottom-most frame. They were used as materialization arguments. | 524 // of the bottom-most frame. They were used as materialization arguments. |
| 506 __ Pop(R1); | 525 __ Pop(R2); // Materialization byte count; R1 is reused for the stacktrace below. |
| 507 if (preserve_result) { | 526 if (kind == kLazyDeoptFromReturn) { |
| 508 __ Pop(R0); // Restore result. | 527 __ Pop(R0); // Restore result. |
| | 528 } else if (kind == kLazyDeoptFromThrow) { |
| | 529 __ Pop(R1); // Restore stacktrace. |
| | 530 __ Pop(R0); // Restore exception. |
| 509 } | 531 } |
| 510 __ LeaveStubFrame(); | 532 __ LeaveStubFrame(); |
| 511 // Remove materialization arguments. | 533 // Remove materialization arguments. |
| 512 __ add(SP, SP, Operand(R1, ASR, kSmiTagSize)); | 534 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); |
| 513 __ Ret(); | 535 __ Ret(); |
| 514 } | 536 } |
| 515 | 537 |
| 516 | 538 |
| 517 void StubCode::GenerateDeoptimizeLazyStub(Assembler* assembler) { | 539 // LR: return address + call-instruction-size |
| | 540 // R0: result, must be preserved |
| | 541 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
| 518 // Correct return address to point just after the call that is being | 542 // Correct return address to point just after the call that is being |
| 519 // deoptimized. | 543 // deoptimized. |
| 520 __ AddImmediate(LR, -CallPattern::DeoptCallPatternLengthInBytes()); | 544 __ AddImmediate(LR, -CallPattern::DeoptCallPatternLengthInBytes()); |
| 521 // Push zap value instead of CODE_REG for lazy deopt. | 545 // Push zap value instead of CODE_REG for lazy deopt. |
| 522 __ LoadImmediate(IP, 0xf1f1f1f1); | 546 __ LoadImmediate(IP, 0xf1f1f1f1); |
| 523 __ Push(IP); | 547 __ Push(IP); |
| 524 GenerateDeoptimizationSequence(assembler, kLazyDeopt); | 548 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 525 } | 549 } |
| 526 | 550 |
| 527 | 551 |
| | 552 // LR: return address + call-instruction-size |
| | 553 // R0: exception, must be preserved |
| | 554 // R1: stacktrace, must be preserved |
| | 555 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
| | 556 // Correct return address to point just after the call that is being |
| | 557 // deoptimized. |
| | 558 __ AddImmediate(LR, -CallPattern::DeoptCallPatternLengthInBytes()); |
| | 559 // Push zap value instead of CODE_REG for lazy deopt. |
| | 560 __ LoadImmediate(IP, 0xf1f1f1f1); |
| | 561 __ Push(IP); |
| | 562 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| | 563 } |
| | 564 |
| | 565 |
| 528 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 566 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
| 529 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 567 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 530 } | 568 } |
| 531 | 569 |
| 532 | 570 |
| 533 static void GenerateDispatcherCode(Assembler* assembler, | 571 static void GenerateDispatcherCode(Assembler* assembler, |
| 534 Label* call_target_function) { | 572 Label* call_target_function) { |
| 535 __ Comment("NoSuchMethodDispatch"); | 573 __ Comment("NoSuchMethodDispatch"); |
| 536 // When lazily generated invocation dispatchers are disabled, the | 574 // When lazily generated invocation dispatchers are disabled, the |
| 537 // miss-handler may return null. | 575 // miss-handler may return null. |
| (...skipping 1680 matching lines...) |
| 2218 } | 2256 } |
| 2219 | 2257 |
| 2220 | 2258 |
| 2221 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2259 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
| 2222 __ bkpt(0); | 2260 __ bkpt(0); |
| 2223 } | 2261 } |
| 2224 | 2262 |
| 2225 } // namespace dart | 2263 } // namespace dart |
| 2226 | 2264 |
| 2227 #endif // defined TARGET_ARCH_ARM | 2265 #endif // defined TARGET_ARCH_ARM |
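Note on the saved-register slot arithmetic in the new lines 428-433: the deoptimization stub pushes all CPU registers starting with the highest-numbered one, so register Rn ends up at FP-relative slot kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - n). That is why R0 (the result for the return case, or the exception for the throw case) and R1 (the stacktrace) land at the saved_*_slot_from_fp offsets computed there. Below is a minimal standalone sketch of that arithmetic only; the constant values (16 CPU registers, 4-byte words, kFirstLocalSlotFromFp = -2) are illustrative placeholders, not taken from the VM's frame-layout headers.

// Standalone sketch of the deopt stub's saved-register slot arithmetic.
// All constants below are illustrative placeholders, not VM values.
#include <cstdio>

namespace {

const int kNumberOfCpuRegisters = 16;  // R0..R15 (placeholder).
const int kWordSize = 4;               // 32-bit target (placeholder).
const int kFirstLocalSlotFromFp = -2;  // Placeholder frame-layout constant.

// The stub pushes registers from R15 down to R0, so the highest-numbered
// register occupies the first local slot and each lower-numbered register
// sits one word further below FP.
int SavedRegisterSlotFromFp(int reg) {
  return kFirstLocalSlotFromFp + 1 - (kNumberOfCpuRegisters - reg);
}

}  // namespace

int main() {
  // R0 holds the result (return case) or the exception (throw case);
  // R1 holds the stacktrace in the throw case.
  for (int reg = 0; reg <= 1; ++reg) {
    std::printf("R%d: slot %d from FP, byte offset %d\n", reg,
                SavedRegisterSlotFromFp(reg),
                SavedRegisterSlotFromFp(reg) * kWordSize);
  }
  return 0;
}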