OLD | NEW |
1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_STUB_CODE_H_ | 5 #ifndef VM_STUB_CODE_H_ |
6 #define VM_STUB_CODE_H_ | 6 #define VM_STUB_CODE_H_ |
7 | 7 |
8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
10 | 10 |
(...skipping 17 matching lines...) Expand all Loading... |
28 V(PrintStopMessage) \ | 28 V(PrintStopMessage) \ |
29 V(CallToRuntime) \ | 29 V(CallToRuntime) \ |
30 V(LazyCompile) \ | 30 V(LazyCompile) \ |
31 V(CallBootstrapCFunction) \ | 31 V(CallBootstrapCFunction) \ |
32 V(CallNativeCFunction) \ | 32 V(CallNativeCFunction) \ |
33 V(FixCallersTarget) \ | 33 V(FixCallersTarget) \ |
34 V(CallStaticFunction) \ | 34 V(CallStaticFunction) \ |
35 V(OptimizeFunction) \ | 35 V(OptimizeFunction) \ |
36 V(InvokeDartCode) \ | 36 V(InvokeDartCode) \ |
37 V(DebugStepCheck) \ | 37 V(DebugStepCheck) \ |
| 38 V(ICLookup) \ |
38 V(MegamorphicLookup) \ | 39 V(MegamorphicLookup) \ |
39 V(FixAllocationStubTarget) \ | 40 V(FixAllocationStubTarget) \ |
40 V(Deoptimize) \ | 41 V(Deoptimize) \ |
41 V(DeoptimizeLazy) \ | 42 V(DeoptimizeLazy) \ |
42 V(UnoptimizedIdenticalWithNumberCheck) \ | 43 V(UnoptimizedIdenticalWithNumberCheck) \ |
43 V(OptimizedIdenticalWithNumberCheck) \ | 44 V(OptimizedIdenticalWithNumberCheck) \ |
44 V(ICCallBreakpoint) \ | 45 V(ICCallBreakpoint) \ |
45 V(RuntimeCallBreakpoint) \ | 46 V(RuntimeCallBreakpoint) \ |
46 V(AllocateArray) \ | 47 V(AllocateArray) \ |
47 V(AllocateContext) \ | 48 V(AllocateContext) \ |
48 V(OneArgCheckInlineCache) \ | 49 V(OneArgCheckInlineCache) \ |
49 V(TwoArgsCheckInlineCache) \ | 50 V(TwoArgsCheckInlineCache) \ |
50 V(SmiAddInlineCache) \ | 51 V(SmiAddInlineCache) \ |
51 V(SmiSubInlineCache) \ | 52 V(SmiSubInlineCache) \ |
52 V(SmiEqualInlineCache) \ | 53 V(SmiEqualInlineCache) \ |
53 V(UnaryRangeCollectingInlineCache) \ | 54 V(UnaryRangeCollectingInlineCache) \ |
54 V(BinaryRangeCollectingInlineCache) \ | 55 V(BinaryRangeCollectingInlineCache) \ |
55 V(OneArgOptimizedCheckInlineCache) \ | 56 V(OneArgOptimizedCheckInlineCache) \ |
56 V(TwoArgsOptimizedCheckInlineCache) \ | 57 V(TwoArgsOptimizedCheckInlineCache) \ |
57 V(ZeroArgsUnoptimizedStaticCall) \ | 58 V(ZeroArgsUnoptimizedStaticCall) \ |
58 V(OneArgUnoptimizedStaticCall) \ | 59 V(OneArgUnoptimizedStaticCall) \ |
59 V(TwoArgsUnoptimizedStaticCall) \ | 60 V(TwoArgsUnoptimizedStaticCall) \ |
60 V(Subtype1TestCache) \ | 61 V(Subtype1TestCache) \ |
61 V(Subtype2TestCache) \ | 62 V(Subtype2TestCache) \ |
62 V(Subtype3TestCache) \ | 63 V(Subtype3TestCache) \ |
63 V(CallClosureNoSuchMethod) | 64 V(CallClosureNoSuchMethod) |
64 | 65 |
65 // It is permitted for the stubs above to refer to Object::null(), which is | 66 // It is permitted for the stubs above to refer to Object::null(), which is |
66 // allocated in the VM isolate and shared across all isolates. | 67 // allocated in the VM isolate and shared across all isolates. |
67 // However, in cases where a simple GC-safe placeholder is needed on the stack, | 68 // However, in cases where a simple GC-safe placeholder is needed on the stack, |
68 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi | 69 // using Smi 0 instead of Object::null() is slightly more efficient, since a Smi |
69 // does not require relocation. | 70 // does not require relocation. |
70 | 71 |
71 // class StubEntry is used to describe stub methods generated in dart to | 72 // class StubEntry is used to describe stub methods generated in dart to |
72 // abstract out common code executed from generated dart code. | 73 // abstract out common code executed from generated dart code. |
73 class StubEntry { | 74 class StubEntry { |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
132 } | 133 } |
133 VM_STUB_CODE_LIST(STUB_CODE_ACCESSOR); | 134 VM_STUB_CODE_LIST(STUB_CODE_ACCESSOR); |
134 #undef STUB_CODE_ACCESSOR | 135 #undef STUB_CODE_ACCESSOR |
135 | 136 |
136 static RawCode* GetAllocationStubForClass(const Class& cls); | 137 static RawCode* GetAllocationStubForClass(const Class& cls); |
137 | 138 |
138 static const StubEntry* UnoptimizedStaticCallEntry(intptr_t num_args_tested); | 139 static const StubEntry* UnoptimizedStaticCallEntry(intptr_t num_args_tested); |
139 | 140 |
140 static const intptr_t kNoInstantiator = 0; | 141 static const intptr_t kNoInstantiator = 0; |
141 | 142 |
142 static void EmitMegamorphicLookup( | 143 static void EmitMegamorphicLookup(Assembler* assembler); |
143 Assembler*, Register recv, Register cache, Register target); | |
144 | 144 |
145 private: | 145 private: |
146 friend class MegamorphicCacheTable; | 146 friend class MegamorphicCacheTable; |
147 | 147 |
148 static const intptr_t kStubCodeSize = 4 * KB; | 148 static const intptr_t kStubCodeSize = 4 * KB; |
149 | 149 |
150 #define STUB_CODE_GENERATE(name) \ | 150 #define STUB_CODE_GENERATE(name) \ |
151 static void Generate##name##Stub(Assembler* assembler); | 151 static void Generate##name##Stub(Assembler* assembler); |
152 VM_STUB_CODE_LIST(STUB_CODE_GENERATE); | 152 VM_STUB_CODE_LIST(STUB_CODE_GENERATE); |
153 #undef STUB_CODE_GENERATE | 153 #undef STUB_CODE_GENERATE |
(...skipping 30 matching lines...) Expand all Loading... |
184 | 184 |
185 | 185 |
186 enum DeoptStubKind { | 186 enum DeoptStubKind { |
187 kLazyDeopt, | 187 kLazyDeopt, |
188 kEagerDeopt | 188 kEagerDeopt |
189 }; | 189 }; |
190 | 190 |
191 } // namespace dart | 191 } // namespace dart |
192 | 192 |
193 #endif // VM_STUB_CODE_H_ | 193 #endif // VM_STUB_CODE_H_ |
OLD | NEW |