OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
(...skipping 531 matching lines...)
542 if (kind == kLazyDeoptFromReturn) { | 542 if (kind == kLazyDeoptFromReturn) { |
543 __ Pop(V0); // Restore result. | 543 __ Pop(V0); // Restore result. |
544 } else if (kind == kLazyDeoptFromThrow) { | 544 } else if (kind == kLazyDeoptFromThrow) { |
545 __ Pop(V1); // Restore stacktrace. | 545 __ Pop(V1); // Restore stacktrace. |
546 __ Pop(V0); // Restore exception. | 546 __ Pop(V0); // Restore exception. |
547 } | 547 } |
548 __ LeaveStubFrame(); | 548 __ LeaveStubFrame(); |
549 // Remove materialization arguments. | 549 // Remove materialization arguments. |
550 __ SmiUntag(T1); | 550 __ SmiUntag(T1); |
551 __ addu(SP, SP, T1); | 551 __ addu(SP, SP, T1); |
552 // The caller is responsible for emitting the return instruction. | 552 __ Ret(); |
553 } | 553 } |
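
A note on the tail of this sequence: a Dart Smi stores integer i as i << 1 with a
zero tag bit, so SmiUntag is an arithmetic right shift by one. Here the untagged
value in T1 is the byte count of materialization arguments popped off the stack.
A minimal sketch of the tagging scheme (assuming the usual one-bit Smi tag; not
taken from this file):

    int32_t SmiTag(int32_t i) { return i << 1; }    // e.g. 12 -> 24
    int32_t SmiUntag(int32_t t) { return t >> 1; }  // e.g. 24 -> 12, sign-preserving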
554 | 554 |
555 // V0: result, must be preserved | 555 // V0: result, must be preserved |
556 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 556 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
557 // Push zap value instead of CODE_REG for lazy deopt. | 557 // Push zap value instead of CODE_REG for lazy deopt. |
558 __ LoadImmediate(TMP, kZapCodeReg); | 558 __ LoadImmediate(TMP, 0xf1f1f1f1); |
559 __ Push(TMP); | 559 __ Push(TMP); |
560 // Return address for "call" to deopt stub. | 560 // Return address for "call" to deopt stub. |
561 __ LoadImmediate(RA, kZapReturnAddress); | 561 __ LoadImmediate(RA, 0xe1e1e1e1); |
562 __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); | 562 __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_return_stub_offset())); |
563 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); | 563 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromReturn); |
564 __ Ret(); | |
565 } | 564 } |
566 | 565 |
567 | 566 |
568 // V0: exception, must be preserved | 567 // V0: exception, must be preserved |
569 // V1: stacktrace, must be preserved | 568 // V1: stacktrace, must be preserved |
570 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 569 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
571 // Push zap value instead of CODE_REG for lazy deopt. | 570 // Push zap value instead of CODE_REG for lazy deopt. |
572 __ LoadImmediate(TMP, kZapCodeReg); | 571 __ LoadImmediate(TMP, 0xf1f1f1f1); |
573 __ Push(TMP); | 572 __ Push(TMP); |
574 // Return address for "call" to deopt stub. | 573 // Return address for "call" to deopt stub. |
575 __ LoadImmediate(RA, kZapReturnAddress); | 574 __ LoadImmediate(RA, 0xe1e1e1e1); |
576 __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); | 575 __ lw(CODE_REG, Address(THR, Thread::lazy_deopt_from_throw_stub_offset())); |
577 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); | 576 GenerateDeoptimizationSequence(assembler, kLazyDeoptFromThrow); |
578 __ Ret(); | |
579 } | 577 } |
580 | 578 |
581 | 579 |
582 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 580 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
583 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | 581 GenerateDeoptimizationSequence(assembler, kEagerDeopt); |
584 __ Ret(); | |
585 } | 582 } |
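
For reference on the zap values above: they are recognizable garbage patterns
pushed where a real Code pointer or return address would otherwise live, so any
stray use of them faults loudly rather than silently. A minimal sketch of the
presumed constant definitions (names from the left column paired with the
literals from the right; the actual declarations sit outside this excerpt):

    static const uint32_t kZapCodeReg = 0xf1f1f1f1;
    static const uint32_t kZapReturnAddress = 0xe1e1e1e1;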
586 | 583 |
587 | 584 |
588 static void GenerateDispatcherCode(Assembler* assembler, | 585 static void GenerateDispatcherCode(Assembler* assembler, |
589 Label* call_target_function) { | 586 Label* call_target_function) { |
590 __ Comment("NoSuchMethodDispatch"); | 587 __ Comment("NoSuchMethodDispatch"); |
591 // When lazily generated invocation dispatchers are disabled, the | 588 // When lazily generated invocation dispatchers are disabled, the |
592 // miss-handler may return null. | 589 // miss-handler may return null. |
593 __ BranchNotEqual(T0, Object::null_object(), call_target_function); | 590 __ BranchNotEqual(T0, Object::null_object(), call_target_function); |
594 __ EnterStubFrame(); | 591 __ EnterStubFrame(); |
(...skipping 1429 matching lines...)
2024 | 2021 |
2025 // Load the stacktrace from the current thread. | 2022 // Load the stacktrace from the current thread. |
2026 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); | 2023 Address stacktrace_addr(THR, Thread::active_stacktrace_offset()); |
2027 __ lw(V1, stacktrace_addr); | 2024 __ lw(V1, stacktrace_addr); |
2028 | 2025 |
2029 __ jr(A0); // Jump to continuation point. | 2026 __ jr(A0); // Jump to continuation point. |
2030 __ delay_slot()->sw(A2, stacktrace_addr); | 2027 __ delay_slot()->sw(A2, stacktrace_addr); |
2031 } | 2028 } |
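
On the jr/delay_slot() pairing above: MIPS executes the instruction in a jump's
delay slot before the transfer completes, so the store to the thread's
active_stacktrace slot always happens. A sketch of the equivalent sequence
without delay-slot scheduling (same registers; ordering made explicit):

    __ sw(A2, stacktrace_addr);  // store A2 (set up earlier, outside this excerpt)
    __ jr(A0);                   // then jump to the continuation point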
2032 | 2029 |
2033 | 2030 |
2034 // Deoptimize a frame on the call stack before rewinding. | |
2035 // The arguments are stored in the Thread object. | |
2036 // No result. | |
2037 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { | |
2038 // Push zap value instead of CODE_REG. | |
2039 __ LoadImmediate(TMP, kZapCodeReg); | |
2040 __ Push(TMP); | |
2041 | |
2042 // Load the deopt pc into RA. | |
2043 __ lw(RA, Address(THR, Thread::resume_pc_offset())); | |
2044 GenerateDeoptimizationSequence(assembler, kEagerDeopt); | |
2045 | |
2046 // After we have deoptimized, jump to the correct frame. | |
2047 __ EnterStubFrame(); | |
2048 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0); | |
2049 __ LeaveStubFrame(); | |
2050 __ break_(0); | |
2051 } | |
2052 | |
2053 | |
2054 // Calls the runtime to optimize the given function. | 2031 // Calls the runtime to optimize the given function. |
2055 // T0: function to be reoptimized. | 2032 // T0: function to be reoptimized. |
2056 // S4: argument descriptor (preserved). | 2033 // S4: argument descriptor (preserved). |
2057 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 2034 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
2058 __ Comment("OptimizeFunctionStub"); | 2035 __ Comment("OptimizeFunctionStub"); |
2059 __ EnterStubFrame(); | 2036 __ EnterStubFrame(); |
2060 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 2037 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
2061 __ sw(S4, Address(SP, 2 * kWordSize)); | 2038 __ sw(S4, Address(SP, 2 * kWordSize)); |
2062 // Set up space on stack for return value. | 2039 // Set up space on stack for return value. |
2063 __ sw(ZR, Address(SP, 1 * kWordSize)); | 2040 __ sw(ZR, Address(SP, 1 * kWordSize)); |
(...skipping 357 matching lines...)
2421 } | 2398 } |
2422 | 2399 |
2423 | 2400 |
2424 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 2401 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
2425 __ break_(0); | 2402 __ break_(0); |
2426 } | 2403 } |
2427 | 2404 |
2428 } // namespace dart | 2405 } // namespace dart |
2429 | 2406 |
2430 #endif // defined TARGET_ARCH_MIPS | 2407 #endif // defined TARGET_ARCH_MIPS |