| OLD | NEW |
| 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
| 6 #if defined(TARGET_ARCH_DBC) | 6 #if defined(TARGET_ARCH_DBC) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/compiler.h" | 9 #include "vm/compiler.h" |
| 10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
| (...skipping 15 matching lines...) |
| 26 DEFINE_FLAG(bool, | 26 DEFINE_FLAG(bool, |
| 27 use_slow_path, | 27 use_slow_path, |
| 28 false, | 28 false, |
| 29 "Set to true for debugging & verifying the slow paths."); | 29 "Set to true for debugging & verifying the slow paths."); |
| 30 DECLARE_FLAG(bool, trace_optimized_ic_calls); | 30 DECLARE_FLAG(bool, trace_optimized_ic_calls); |
| 31 | 31 |
| 32 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { | 32 void StubCode::GenerateLazyCompileStub(Assembler* assembler) { |
| 33 __ Compile(); | 33 __ Compile(); |
| 34 } | 34 } |
| 35 | 35 |
| 36 | |
| 37 // Not executed, but used as a stack marker when calling | 36 // Not executed, but used as a stack marker when calling |
| 38 // DRT_OptimizeInvokedFunction. | 37 // DRT_OptimizeInvokedFunction. |
| 39 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { | 38 void StubCode::GenerateOptimizeFunctionStub(Assembler* assembler) { |
| 40 __ Trap(); | 39 __ Trap(); |
| 41 } | 40 } |
| 42 | 41 |
| 43 | |
| 44 // Not executed, but used as a sentinel in Simulator::JumpToFrame. | 42 // Not executed, but used as a sentinel in Simulator::JumpToFrame. |
| 45 void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) { | 43 void StubCode::GenerateRunExceptionHandlerStub(Assembler* assembler) { |
| 46 __ Trap(); | 44 __ Trap(); |
| 47 } | 45 } |
| 48 | 46 |
| 49 | |
| 50 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { | 47 void StubCode::GenerateDeoptForRewindStub(Assembler* assembler) { |
| 51 __ DeoptRewind(); | 48 __ DeoptRewind(); |
| 52 } | 49 } |
| 53 | 50 |
| 54 | |
| 55 // TODO(vegorov) Don't generate this stub. | 51 // TODO(vegorov) Don't generate this stub. |
| 56 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { | 52 void StubCode::GenerateFixCallersTargetStub(Assembler* assembler) { |
| 57 __ Trap(); | 53 __ Trap(); |
| 58 } | 54 } |
| 59 | 55 |
| 60 | |
| 61 // TODO(vegorov) Don't generate these stubs. | 56 // TODO(vegorov) Don't generate these stubs. |
| 62 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, | 57 void StubCode::GenerateAllocationStubForClass(Assembler* assembler, |
| 63 const Class& cls) { | 58 const Class& cls) { |
| 64 __ Trap(); | 59 __ Trap(); |
| 65 } | 60 } |
| 66 | 61 |
| 67 | |
| 68 // TODO(vegorov) Don't generate this stub. | 62 // TODO(vegorov) Don't generate this stub. |
| 69 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { | 63 void StubCode::GenerateMegamorphicMissStub(Assembler* assembler) { |
| 70 __ Trap(); | 64 __ Trap(); |
| 71 } | 65 } |
| 72 | 66 |
| 73 | |
| 74 // These deoptimization stubs are only used to populate stack frames | 67 // These deoptimization stubs are only used to populate stack frames |
| 75 // with something meaningful to make sure GC can scan the stack during | 68 // with something meaningful to make sure GC can scan the stack during |
| 76 // the last phase of deoptimization which materializes objects. | 69 // the last phase of deoptimization which materializes objects. |
| 77 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { | 70 void StubCode::GenerateDeoptimizeLazyFromReturnStub(Assembler* assembler) { |
| 78 __ Trap(); | 71 __ Trap(); |
| 79 } | 72 } |
| 80 | 73 |
| 81 | |
| 82 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { | 74 void StubCode::GenerateDeoptimizeLazyFromThrowStub(Assembler* assembler) { |
| 83 __ Trap(); | 75 __ Trap(); |
| 84 } | 76 } |
| 85 | 77 |
| 86 | |
| 87 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { | 78 void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { |
| 88 __ Trap(); | 79 __ Trap(); |
| 89 } | 80 } |
| 90 | 81 |
| 91 | |
| 92 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { | 82 void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { |
| 93 __ Trap(); | 83 __ Trap(); |
| 94 } | 84 } |
| 95 | 85 |
| 96 | |
| 97 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { | 86 void StubCode::GenerateAsynchronousGapMarkerStub(Assembler* assembler) { |
| 98 __ Trap(); | 87 __ Trap(); |
| 99 } | 88 } |
| 100 | 89 |
| 101 | |
| 102 // Print the stop message. | 90 // Print the stop message. |
| 103 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { | 91 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) { |
| 104 OS::Print("Stop message: %s\n", message); | 92 OS::Print("Stop message: %s\n", message); |
| 105 } | 93 } |
| 106 END_LEAF_RUNTIME_ENTRY | 94 END_LEAF_RUNTIME_ENTRY |
| 107 | 95 |
| 108 } // namespace dart | 96 } // namespace dart |
| 109 | 97 |
| 110 #endif // defined TARGET_ARCH_DBC | 98 #endif // defined TARGET_ARCH_DBC |