OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 519 matching lines...) |
530 __ Pop(R2); | 530 __ Pop(R2); |
531 if (kind == kLazyDeoptFromReturn) { | 531 if (kind == kLazyDeoptFromReturn) { |
532 __ Pop(R0); // Restore result. | 532 __ Pop(R0); // Restore result. |
533 } else if (kind == kLazyDeoptFromThrow) { | 533 } else if (kind == kLazyDeoptFromThrow) { |
534 __ Pop(R1); // Restore stacktrace. | 534 __ Pop(R1); // Restore stacktrace. |
535 __ Pop(R0); // Restore exception. | 535 __ Pop(R0); // Restore exception. |
536 } | 536 } |
537 __ LeaveStubFrame(); | 537 __ LeaveStubFrame(); |
538 // Remove materialization arguments. | 538 // Remove materialization arguments. |
539 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); | 539 __ add(SP, SP, Operand(R2, ASR, kSmiTagSize)); |
540 // The caller is responsible for emitting the return instruction. | 540 __ Ret(); |
541 } | 541 } |
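
(Sketch: the "add(SP, SP, Operand(R2, ASR, kSmiTagSize))" above drops the materialization
arguments by untagging the Smi byte count left in R2. A minimal illustration of that Smi
arithmetic, assuming the usual 32-bit layout with a zero tag bit and kSmiTagSize == 1;
these are stand-in helpers written for this note, not the VM's own.)

    // Illustration only: Smi tagging as assumed above (32-bit, tag bit 0).
    #include <cstdint>
    constexpr int32_t kSmiTagSizeSketch = 1;
    constexpr int32_t SmiTag(int32_t value) { return value << kSmiTagSizeSketch; }
    constexpr int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSizeSketch; }  // what ASR #1 does
    static_assert(SmiUntag(SmiTag(24)) == 24, "tag/untag round-trips");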
542 | 542 |
543 | 543 |
544 // R0: result, must be preserved | 544 // R0: result, must be preserved |
545 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 545 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
546 // Push zap value instead of CODE_REG for lazy deopt. | 546 // Push zap value instead of CODE_REG for lazy deopt. |
547 __ LoadImmediate(IP, kZapCodeReg); | 547 __ LoadImmediate(IP, 0xf1f1f1f1); |
548 __ Push(IP); | 548 __ Push(IP); |
549 // Return address for "call" to deopt stub. | 549 // Return address for "call" to deopt stub. |
550 __ LoadImmediate(LR, kZapReturnAddress); | 550 __ LoadImmediate(LR, 0xe1e1e1e1); |
551 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 551 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
552 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 552 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
553 __ Ret(); | |
554 } | 553 } |
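
(Sketch: the named constants on the left column and the literals on the right encode the
same zap values; a hedged guess at how such constants might read, taken from the literals
in this diff rather than from the VM's actual declarations.)

    // Sketch only; the real declarations live elsewhere in the VM and may differ.
    #include <cstdint>
    const uint32_t kZapCodeReg = 0xf1f1f1f1;        // Placeholder pushed where CODE_REG would go.
    const uint32_t kZapReturnAddress = 0xe1e1e1e1;  // Fake return address for the "call" to the deopt stub.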
555 | 554 |
556 | 555 |
557 // R0: exception, must be preserved | 556 // R0: exception, must be preserved |
558 // R1: stacktrace, must be preserved | 557 // R1: stacktrace, must be preserved |
559 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 558 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
560 // Push zap value instead of CODE_REG for lazy deopt. | 559 // Push zap value instead of CODE_REG for lazy deopt. |
561 __ LoadImmediate(IP, kZapCodeReg); | 560 __ LoadImmediate(IP, 0xf1f1f1f1); |
562 __ Push(IP); | 561 __ Push(IP); |
563 // Return address for "call" to deopt stub. | 562 // Return address for "call" to deopt stub. |
564 __ LoadImmediate(LR, kZapReturnAddress); | 563 __ LoadImmediate(LR, 0xe1e1e1e1); |
565 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 564 __ ldr(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
566 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 565 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
567 __ Ret(); | |
568 } | 566 } |
569 | 567 |
570 | 568 |
571 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 569 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
572 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 570 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
573 __ Ret(); | |
574 } | 571 } |
575 | 572 |
576 | 573 |
577 static void GenerateDispatcherCode(Assembler* assembler, | 574 static void GenerateDispatcherCode(Assembler* assembler, |
578 Label* call_target_function) { | 575 Label* call_target_function) { |
579 __ Comment("NoSuchMethodDispatch"); | 576 __ Comment("NoSuchMethodDispatch"); |
580 // When lazily generated invocation dispatchers are disabled, the | 577 // When lazily generated invocation dispatchers are disabled, the |
581 // miss-handler may return null. | 578 // miss-handler may return null. |
582 __ CompareObject(R0, Object::null_object()); | 579 __ CompareObject(R0, Object::null_object()); |
583 __ b(call_target_function, NE); | 580 __ b(call_target_function, NE); |
(...skipping 1311 matching lines...) |
1895 __ StoreToOffset(kWord, R2, THR, Thread::active_exception_offset()); | 1892 __ StoreToOffset(kWord, R2, THR, Thread::active_exception_offset()); |
1896 | 1893 |
1897 // Stacktrace object. | 1894 // Stacktrace object. |
1898 __ LoadFromOffset(kWord, R1, THR, Thread::active_stacktrace_offset()); | 1895 __ LoadFromOffset(kWord, R1, THR, Thread::active_stacktrace_offset()); |
1899 __ StoreToOffset(kWord, R2, THR, Thread::active_stacktrace_offset()); | 1896 __ StoreToOffset(kWord, R2, THR, Thread::active_stacktrace_offset()); |
1900 | 1897 |
1901 __ bx(LR); // Jump to the exception handler code. | 1898 __ bx(LR); // Jump to the exception handler code. |
1902 } | 1899 } |
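
(Sketch: conceptually the tail above hands the pending exception state to the handler and
clears it on the Thread; a rough C++ data-flow sketch, assuming the elided start of this
stub loads the exception into R0 and null into R2. The register comments and the Thread_
struct here are stand-ins for illustration, not the VM's types.)

    // Rough data-flow illustration; the real stub works on machine registers.
    struct Thread_ { void* active_exception; void* active_stacktrace; };
    void RunHandlerSketch(Thread_* thr, void (*handler)(void* exception, void* stacktrace)) {
      void* exception = thr->active_exception;    // R0 (loaded in the elided portion).
      thr->active_exception = nullptr;            // Store R2 (null).
      void* stacktrace = thr->active_stacktrace;  // R1.
      thr->active_stacktrace = nullptr;           // Store R2 (null).
      handler(exception, stacktrace);             // bx LR: jump to the handler code.
    }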
1903 | 1900 |
1904 | 1901 |
1905 // Deoptimize a frame on the call stack before rewinding. | |
1906 // The arguments are stored in the Thread object. | |
1907 // No result. | |
1908 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { | |
1909 // Push zap value instead of CODE_REG. | |
1910 __ LoadImmediate(IP, kZapCodeReg); | |
1911 __ Push(IP); | |
1912 | |
1913 // Load the deopt pc into LR. | |
1914 __ LoadFromOffset(kWord, LR, THR, Thread::resume_pc_offset()); | |
1915 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | |
1916 | |
1917 // After we have deoptimized, jump to the correct frame. | |
1918 __ EnterStubFrame(); | |
1919 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); | |
1920 __ LeaveStubFrame(); | |
1921 __ bkpt(0); | |
1922 } | |
1923 | |
1924 | |
1925 // Calls to the runtime to optimize the given function. | 1902 // Calls to the runtime to optimize the given function. |
1926 // R8: function to be reoptimized. | 1903 // R8: function to be reoptimized. |
1927 // R4: argument descriptor (preserved). | 1904 // R4: argument descriptor (preserved). |
1928 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 1905 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
1929 __ EnterStubFrame(); | 1906 __ EnterStubFrame(); |
1930 __ Push(R4); | 1907 __ Push(R4); |
1931 __ LoadImmediate(IP, 0); | 1908 __ LoadImmediate(IP, 0); |
1932 __ Push(IP); // Setup space on stack for return value. | 1909 __ Push(IP); // Setup space on stack for return value. |
1933 __ Push(R8); | 1910 __ Push(R8); |
1934 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); | 1911 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1); |
(...skipping 346 matching lines...) |
2281 } | 2258 } |
2282 | 2259 |
2283 | 2260 |
2284 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2261 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2285 __ bkpt(0); | 2262 __ bkpt(0); |
2286 } | 2263 } |
2287 | 2264 |
2288 } // namespace dart | 2265 } // namespace dart |
2289 | 2266 |
2290 #endif // defined TARGET_ARCH_ARM | 2267 #endif // defined TARGET_ARCH_ARM |