| OLD | NEW |
| 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef VM_STUB_CODE_H_ | 5 #ifndef VM_STUB_CODE_H_ |
| 6 #define VM_STUB_CODE_H_ | 6 #define VM_STUB_CODE_H_ |
| 7 | 7 |
| 8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
| 9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
| 10 | 10 |
| (...skipping 27 matching lines...) Expand all Loading... |
| 38 V(InvokeDartCode) \ | 38 V(InvokeDartCode) \ |
| 39 V(DebugStepCheck) \ | 39 V(DebugStepCheck) \ |
| 40 V(UnlinkedCall) \ | 40 V(UnlinkedCall) \ |
| 41 V(MonomorphicMiss) \ | 41 V(MonomorphicMiss) \ |
| 42 V(SingleTargetCall) \ | 42 V(SingleTargetCall) \ |
| 43 V(ICCallThroughFunction) \ | 43 V(ICCallThroughFunction) \ |
| 44 V(ICCallThroughCode) \ | 44 V(ICCallThroughCode) \ |
| 45 V(MegamorphicCall) \ | 45 V(MegamorphicCall) \ |
| 46 V(FixAllocationStubTarget) \ | 46 V(FixAllocationStubTarget) \ |
| 47 V(Deoptimize) \ | 47 V(Deoptimize) \ |
| 48 V(DeoptimizeLazy) \ | 48 V(DeoptimizeLazyFromReturn) \ |
| 49 V(DeoptimizeLazyFromThrow) \ |
| 49 V(UnoptimizedIdenticalWithNumberCheck) \ | 50 V(UnoptimizedIdenticalWithNumberCheck) \ |
| 50 V(OptimizedIdenticalWithNumberCheck) \ | 51 V(OptimizedIdenticalWithNumberCheck) \ |
| 51 V(ICCallBreakpoint) \ | 52 V(ICCallBreakpoint) \ |
| 52 V(RuntimeCallBreakpoint) \ | 53 V(RuntimeCallBreakpoint) \ |
| 53 V(AllocateArray) \ | 54 V(AllocateArray) \ |
| 54 V(AllocateContext) \ | 55 V(AllocateContext) \ |
| 55 V(OneArgCheckInlineCache) \ | 56 V(OneArgCheckInlineCache) \ |
| 56 V(TwoArgsCheckInlineCache) \ | 57 V(TwoArgsCheckInlineCache) \ |
| 57 V(SmiAddInlineCache) \ | 58 V(SmiAddInlineCache) \ |
| 58 V(SmiSubInlineCache) \ | 59 V(SmiSubInlineCache) \ |
| 59 V(SmiEqualInlineCache) \ | 60 V(SmiEqualInlineCache) \ |
| 60 V(OneArgOptimizedCheckInlineCache) \ | 61 V(OneArgOptimizedCheckInlineCache) \ |
| 61 V(TwoArgsOptimizedCheckInlineCache) \ | 62 V(TwoArgsOptimizedCheckInlineCache) \ |
| 62 V(ZeroArgsUnoptimizedStaticCall) \ | 63 V(ZeroArgsUnoptimizedStaticCall) \ |
| 63 V(OneArgUnoptimizedStaticCall) \ | 64 V(OneArgUnoptimizedStaticCall) \ |
| 64 V(TwoArgsUnoptimizedStaticCall) \ | 65 V(TwoArgsUnoptimizedStaticCall) \ |
| 65 V(Subtype1TestCache) \ | 66 V(Subtype1TestCache) \ |
| 66 V(Subtype2TestCache) \ | 67 V(Subtype2TestCache) \ |
| 67 V(Subtype3TestCache) \ | 68 V(Subtype3TestCache) \ |
| 68 V(CallClosureNoSuchMethod) \ | 69 V(CallClosureNoSuchMethod) \ |
| 69 V(FrameAwaitingMaterialization) \ | 70 V(FrameAwaitingMaterialization) \ |
| 70 | 71 |
| 71 #else | 72 #else |
| 72 #define VM_STUB_CODE_LIST(V) \ | 73 #define VM_STUB_CODE_LIST(V) \ |
| 73 V(LazyCompile) \ | 74 V(LazyCompile) \ |
| 74 V(FixCallersTarget) \ | 75 V(FixCallersTarget) \ |
| 75 V(Deoptimize) \ | 76 V(Deoptimize) \ |
| 76 V(DeoptimizeLazy) \ | 77 V(DeoptimizeLazyFromReturn) \ |
| 78 V(DeoptimizeLazyFromThrow) \ |
| 77 V(FrameAwaitingMaterialization) \ | 79 V(FrameAwaitingMaterialization) \ |
| 78 | 80 |
| 79 #endif // !defined(TARGET_ARCH_DBC) | 81 #endif // !defined(TARGET_ARCH_DBC) |
| 80 | 82 |
| 81 // It is permitted for the stubs above to refer to Object::null(), which is | 83 // It is permitted for the stubs above to refer to Object::null(), which is |
| 82 // allocated in the VM isolate and shared across all isolates. | 84 // allocated in the VM isolate and shared across all isolates. |
| 83 // However, in cases where a simple GC-safe placeholder is needed on the stack, | 85 // However, in cases where a simple GC-safe placeholder is needed on the stack, |
| 84 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi | 86 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi |
| 85 // does not require relocation. | 87 // does not require relocation. |
| 86 | 88 |
| (...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 187 const RuntimeEntry& handle_ic_miss, | 189 const RuntimeEntry& handle_ic_miss, |
| 188 Token::Kind kind, | 190 Token::Kind kind, |
| 189 bool optimized = false); | 191 bool optimized = false); |
| 190 static void GenerateUsageCounterIncrement(Assembler* assembler, | 192 static void GenerateUsageCounterIncrement(Assembler* assembler, |
| 191 Register temp_reg); | 193 Register temp_reg); |
| 192 static void GenerateOptimizedUsageCounterIncrement(Assembler* assembler); | 194 static void GenerateOptimizedUsageCounterIncrement(Assembler* assembler); |
| 193 }; | 195 }; |
| 194 | 196 |
| 195 | 197 |
| 196 enum DeoptStubKind { | 198 enum DeoptStubKind { |
| 197 kLazyDeopt, | 199 kLazyDeoptFromReturn, |
| 200 kLazyDeoptFromThrow, |
| 198 kEagerDeopt | 201 kEagerDeopt |
| 199 }; | 202 }; |
| 200 | 203 |
| 201 } // namespace dart | 204 } // namespace dart |
| 202 | 205 |
| 203 #endif // VM_STUB_CODE_H_ | 206 #endif // VM_STUB_CODE_H_ |
| OLD | NEW |