OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 540 matching lines...)
551 __ SmiUntag(R2); | 551 __ SmiUntag(R2); |
552 if (kind == kLazyDeoptFromReturn) { | 552 if (kind == kLazyDeoptFromReturn) { |
553 __ Pop(R0); // Restore result. | 553 __ Pop(R0); // Restore result. |
554 } else if (kind == kLazyDeoptFromThrow) { | 554 } else if (kind == kLazyDeoptFromThrow) { |
555 __ Pop(R1); // Restore stacktrace. | 555 __ Pop(R1); // Restore stacktrace. |
556 __ Pop(R0); // Restore exception. | 556 __ Pop(R0); // Restore exception. |
557 } | 557 } |
558 __ LeaveStubFrame(); | 558 __ LeaveStubFrame(); |
559 // Remove materialization arguments. | 559 // Remove materialization arguments. |
560 __ add(SP, SP, Operand(R2)); | 560 __ add(SP, SP, Operand(R2)); |
561 __ ret(); | 561 // The caller is responsible for emitting the return instruction. |
562 } | 562 } |
563 | 563 |
564 | 564 |
565 // R0: result, must be preserved | 565 // R0: result, must be preserved |
566 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 566 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
567 // Push zap value instead of CODE_REG for lazy deopt. | 567 // Push zap value instead of CODE_REG for lazy deopt. |
568 __ LoadImmediate(TMP, 0xf1f1f1f1); | 568 __ LoadImmediate(TMP, kZapCodeReg); |
569 __ Push(TMP); | 569 __ Push(TMP); |
570 // Return address for "call" to deopt stub. | 570 // Return address for "call" to deopt stub. |
571 __ LoadImmediate(LR, 0xe1e1e1e1); | 571 __ LoadImmediate(LR, kZapReturnAddress); |
572 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 572 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
573 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 573 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 574 __ ret(); |
574 } | 575 } |
575 | 576 |
576 | 577 |
577 // R0: exception, must be preserved | 578 // R0: exception, must be preserved |
578 // R1: stacktrace, must be preserved | 579 // R1: stacktrace, must be preserved |
579 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 580 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
580 // Push zap value instead of CODE_REG for lazy deopt. | 581 // Push zap value instead of CODE_REG for lazy deopt. |
581 __ LoadImmediate(TMP, 0xf1f1f1f1); | 582 __ LoadImmediate(TMP, kZapCodeReg); |
582 __ Push(TMP); | 583 __ Push(TMP); |
583 // Return address for "call" to deopt stub. | 584 // Return address for "call" to deopt stub. |
584 __ LoadImmediate(LR, 0xe1e1e1e1); | 585 __ LoadImmediate(LR, kZapReturnAddress); |
585 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 586 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
586 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 587 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| 588 __ ret(); |
587 } | 589 } |
588 | 590 |
589 | 591 |
590 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 592 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
591 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 593 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 594 __ ret(); |
592 } | 595 } |
593 | 596 |
594 | 597 |
595 static void GenerateDispatcherCode(Assembler* assembler, | 598 static void GenerateDispatcherCode(Assembler* assembler, |
596 Label* call_target_function) { | 599 Label* call_target_function) { |
597 __ Comment("NoSuchMethodDispatch"); | 600 __ Comment("NoSuchMethodDispatch"); |
598 // When lazily generated invocation dispatchers are disabled, the | 601 // When lazily generated invocation dispatchers are disabled, the |
599 // miss-handler may return null. | 602 // miss-handler may return null. |
600 __ CompareObject(R0, Object::null_object()); | 603 __ CompareObject(R0, Object::null_object()); |
601 __ b(call_target_function, NE); | 604 __ b(call_target_function, NE); |
(...skipping 1340 matching lines...)
1942 __ StoreToOffset(R2, THR, Thread::active_exception_offset()); | 1945 __ StoreToOffset(R2, THR, Thread::active_exception_offset()); |
1943 | 1946 |
1944 // Stacktrace object. | 1947 // Stacktrace object. |
1945 __ LoadFromOffset(R1, THR, Thread::active_stacktrace_offset()); | 1948 __ LoadFromOffset(R1, THR, Thread::active_stacktrace_offset()); |
1946 __ StoreToOffset(R2, THR, Thread::active_stacktrace_offset()); | 1949 __ StoreToOffset(R2, THR, Thread::active_stacktrace_offset()); |
1947 | 1950 |
1948 __ ret(); // Jump to the exception handler code. | 1951 __ ret(); // Jump to the exception handler code. |
1949 } | 1952 } |
1950 | 1953 |
1951 | 1954 |
| 1955 // Deoptimize a frame on the call stack before rewinding. |
| 1956 // The arguments are stored in the Thread object. |
| 1957 // No result. |
| 1958 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { |
| 1959 // Push zap value instead of CODE_REG. |
| 1960 __ LoadImmediate(TMP, kZapCodeReg); |
| 1961 __ Push(TMP); |
| 1962 |
| 1963 // Load the deopt pc into LR. |
| 1964 __ LoadFromOffset(LR, THR, Thread::resume_pc_offset()); |
| 1965 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 1966 |
| 1967 // After we have deoptimized, jump to the correct frame. |
| 1968 __ EnterStubFrame(); |
| 1969 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); |
| 1970 __ LeaveStubFrame(); |
| 1971 __ brk(0); |
| 1972 } |
| 1973 |
| 1974 |
1952 // Calls to the runtime to optimize the given function. | 1975 // Calls to the runtime to optimize the given function. |
1953 // R6: function to be re-optimized. | 1976 // R6: function to be re-optimized. |
1954 // R4: argument descriptor (preserved). | 1977 // R4: argument descriptor (preserved). |
1955 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1978 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1956 __ EnterStubFrame(); | 1979 __ EnterStubFrame(); |
1957 __ Push(R4); | 1980 __ Push(R4); |
1958 // Set up space on stack for the return value. | 1981 // Set up space on stack for the return value. |
1959 __ Push(ZR); | 1982 __ Push(ZR); |
1960 __ Push(R6); | 1983 __ Push(R6); |
1961 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1984 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
(...skipping 352 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2314 } | 2337 } |
2315 | 2338 |
2316 | 2339 |
2317 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2340 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2318 __ brk(0); | 2341 __ brk(0); |
2319 } | 2342 } |
2320 | 2343 |
2321 } // namespace dart | 2344 } // namespace dart |
2322 | 2345 |
2323 #endif // defined TARGET_ARCH_ARM64 | 2346 #endif // defined TARGET_ARCH_ARM64 |
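
The structural point of this patch shows in the `ret()` lines above: GenerateDeoptimizationSequence no longer emits the final return itself, because the new GenerateDeoptForRewindStub must fall through after the shared sequence into a kRewindPostDeoptRuntimeEntry call instead of returning, while the three pre-existing deopt stubs now append `__ ret()` themselves. Below is a minimal control-flow sketch of that split in plain C++. EmitDeoptSequence, EmitRet, and the printed strings are illustrative stand-ins, not the VM's macro-assembler API; the zap constant values are the literals (0xf1f1f1f1 / 0xe1e1e1e1) that this patch replaces with the named kZapCodeReg / kZapReturnAddress.

#include <cstdint>
#include <cstdio>

// Named zap constants as assumed by this patch; the values mirror the
// magic numbers the old code pushed directly.
static constexpr uint32_t kZapCodeReg = 0xf1f1f1f1;
static constexpr uint32_t kZapReturnAddress = 0xe1e1e1e1;

// Stand-in for GenerateDeoptimizationSequence: after this patch it
// deliberately does NOT emit the return instruction.
static void EmitDeoptSequence() {
  std::printf("push zap values 0x%08x / 0x%08x\n",
              static_cast<unsigned>(kZapCodeReg),
              static_cast<unsigned>(kZapReturnAddress));
  std::puts("...unwind to deopt point, materialize objects...");
}

static void EmitRet() { std::puts("ret"); }

// The pre-existing deopt stubs now end the sequence with a return into
// the freshly deoptimized frame.
static void GenerateDeoptimizeStub() {
  EmitDeoptSequence();
  EmitRet();
}

// The new rewind stub reuses the same sequence but continues past it,
// calling the rewind runtime entry instead of returning (the real stub
// emits brk(0) to trap if that call ever comes back).
static void GenerateDeoptForRewindStub() {
  EmitDeoptSequence();
  std::puts("call RewindPostDeopt");  // Never expected to return.
}

int main() {
  GenerateDeoptimizeStub();
  GenerateDeoptForRewindStub();
  return 0;
}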