OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 519 matching lines...)
530 __ Pop(R2); | 530 __ Pop(R2); |
531 if (kind == kLazyDeoptFromReturn) { | 531 if (kind == kLazyDeoptFromReturn) { |
532 __ Pop(R0); // Restore result. | 532 __ Pop(R0); // Restore result. |
533 } else if (kind == kLazyDeoptFromThrow) { | 533 } else if (kind == kLazyDeoptFromThrow) { |
534 __ Pop(R1); // Restore stacktrace. | 534 __ Pop(R1); // Restore stacktrace. |
535 __ Pop(R0); // Restore exception. | 535 __ Pop(R0); // Restore exception. |
536 } | 536 } |
537 __ LeaveStubFrame(); | 537 __ LeaveStubFrame(); |
538 // Remove materialization arguments. | 538 // Remove materialization arguments. |
539 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); | 539 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); |
540 __ Ret(); | 540 // The caller is responsible for emitting the return instruction. |
541 } | 541 } |
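The NEW column drops the trailing __ Ret() from the shared deoptimization sequence, so each entry point now decides how to continue after it. A condensed sketch of the resulting caller pattern, pieced together from the hunks below (not new code):

    // Ordinary deopt stubs return to the caller right after the shared sequence:
    GenerateDeoptimizationSequence(assembler, kEagerDeopt);
    __ Ret();

    // The new GenerateDeoptForRewindStub (further down) instead falls through
    // into a runtime call, which is why the return moved out of the helper.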
542 | 542 |
543 | 543 |
544 // R0: result, must be preserved | 544 // R0: result, must be preserved |
545 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 545 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
546 // Push zap value instead of CODE_REG for lazy deopt. | 546 // Push zap value instead of CODE_REG for lazy deopt. |
547 __ LoadImmediate(IP, 0xf1f1f1f1); | 547 __ LoadImmediate(IP, kZapCodeReg); |
548 __ Push(IP); | 548 __ Push(IP); |
549 // Return address for "call" to deopt stub. | 549 // Return address for "call" to deopt stub. |
550 __ LoadImmediate(LR, 0xe1e1e1e1); | 550 __ LoadImmediate(LR, kZapReturnAddress); |
551 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 551 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
552 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 552 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
| 553 __ Ret(); |
553 } | 554 } |
554 | 555 |
555 | 556 |
556 // R0: exception, must be preserved | 557 // R0: exception, must be preserved |
557 // R1: stacktrace, must be preserved | 558 // R1: stacktrace, must be preserved |
558 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 559 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
559 // Push zap value instead of CODE_REG for lazy deopt. | 560 // Push zap value instead of CODE_REG for lazy deopt. |
560 __ LoadImmediate(IP, 0xf1f1f1f1); | 561 __ LoadImmediate(IP, kZapCodeReg); |
561 __ Push(IP); | 562 __ Push(IP); |
562 // Return address for "call" to deopt stub. | 563 // Return address for "call" to deopt stub. |
563 __ LoadImmediate(LR, 0xe1e1e1e1); | 564 __ LoadImmediate(LR, kZapReturnAddress); |
564 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 565 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
565 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 566 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
| 567 __ Ret(); |
566 } | 568 } |
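The kZapCodeReg and kZapReturnAddress names used above replace the raw immediates shown in the OLD column. A hedged sketch of what their declarations would look like, with the values taken from the replaced literals (the actual definitions live elsewhere in the VM sources and may differ in spelling or type):

    // Assumed declarations; names from the NEW column, values from the OLD column.
    static const uint32_t kZapCodeReg = 0xf1f1f1f1;        // pushed in place of CODE_REG
    static const uint32_t kZapReturnAddress = 0xe1e1e1e1;  // fake return address for the deopt "call"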
567 | 569 |
568 | 570 |
569 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 571 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
570 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 572 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 573 __ Ret(); |
571 } | 574 } |
572 | 575 |
573 | 576 |
574 static void GenerateDispatcherCode(Assembler* assembler, | 577 static void GenerateDispatcherCode(Assembler* assembler, |
575 Label* call_target_function) { | 578 Label* call_target_function) { |
576 __ Comment("NoSuchMethodDispatch"); | 579 __ Comment("NoSuchMethodDispatch"); |
577 // When lazily generated invocation dispatchers are disabled, the | 580 // When lazily generated invocation dispatchers are disabled, the |
578 // miss-handler may return null. | 581 // miss-handler may return null. |
579 __ CompareObject(R0, Object::null_object()); | 582 __ CompareObject(R0, Object::null_object()); |
580 __ b(call_target_function, NE); | 583 __ b(call_target_function, NE); |
(...skipping 1311 matching lines...)
1892 __ StoreToOffset(kWord, R2, THR, Thread::active_exception_offset()); | 1895 __ StoreToOffset(kWord, R2, THR, Thread::active_exception_offset()); |
1893 | 1896 |
1894 // Stacktrace object. | 1897 // Stacktrace object. |
1895 __ LoadFromOffset(kWord, R1, THR, Thread::active_stacktrace_offset()); | 1898 __ LoadFromOffset(kWord, R1, THR, Thread::active_stacktrace_offset()); |
1896 __ StoreToOffset(kWord, R2, THR, Thread::active_stacktrace_offset()); | 1899 __ StoreToOffset(kWord, R2, THR, Thread::active_stacktrace_offset()); |
1897 | 1900 |
1898 __ bx(LR); // Jump to the exception handler code. | 1901 __ bx(LR); // Jump to the exception handler code. |
1899 } | 1902 } |
1900 | 1903 |
1901 | 1904 |
| 1905 // Deoptimize a frame on the call stack before rewinding. |
| 1906 // The arguments are stored in the Thread object. |
| 1907 // No result. |
| 1908 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { |
| 1909 // Push zap value instead of CODE_REG. |
| 1910 __ LoadImmediate(IP, kZapCodeReg); |
| 1911 __ Push(IP); |
| 1912 |
| 1913 // Load the deopt pc into LR. |
| 1914 __ LoadFromOffset(kWord, LR, THR, Thread::resume_pc_offset()); |
| 1915 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
| 1916 |
| 1917 // After we have deoptimized, jump to the correct frame. |
| 1918 __ EnterStubFrame(); |
| 1919 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); |
| 1920 __ LeaveStubFrame(); |
| 1921 __ bkpt(0); |
| 1922 } |
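A brief annotation of the new stub's tail (an assumed reading of the VM convention, not stated in the patch):

    // Annotation only (assumed): kRewindPostDeoptRuntimeEntry is expected to
    // transfer control into the rewound frame and never return here, so the
    // trailing bkpt(0) acts as a trap that fires only if that assumption fails.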
| 1923 |
| 1924 |
1902 // Calls to the runtime to optimize the given function. | 1925 // Calls to the runtime to optimize the given function. |
1903 // R8: function to be reoptimized. | 1926 // R8: function to be reoptimized. |
1904 // R4: argument descriptor (preserved). | 1927 // R4: argument descriptor (preserved). |
1905 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1928 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1906 __ EnterStubFrame(); | 1929 __ EnterStubFrame(); |
1907 __ Push(R4); | 1930 __ Push(R4); |
1908 __ LoadImmediate(IP, 0); | 1931 __ LoadImmediate(IP, 0); |
1909 __ Push(IP); // Setup space on stack for return value. | 1932 __ Push(IP); // Setup space on stack for return value. |
1910 __ Push(R8); | 1933 __ Push(R8); |
1911 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1934 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
(...skipping 346 matching lines...)
2258 } | 2281 } |
2259 | 2282 |
2260 | 2283 |
2261 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2284 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2262 __ bkpt(0); | 2285 __ bkpt(0); |
2263 } | 2286 } |
2264 | 2287 |
2265 } // namespace dart | 2288 } // namespace dart |
2266 | 2289 |
2267 #endif // defined TARGET_ARCH_ARM | 2290 #endif // defined TARGET_ARCH_ARM |