Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(189)

Side by Side Diff: runtime/vm/flow_graph_compiler_ia32.h

Issue 11956004: Fix vm code base so that it can be built for --arch=simarm (no snapshot yet). (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « runtime/vm/flow_graph_compiler_arm.cc ('k') | runtime/vm/flow_graph_compiler_ia32.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file.
4
5 #ifndef VM_FLOW_GRAPH_COMPILER_IA32_H_
6 #define VM_FLOW_GRAPH_COMPILER_IA32_H_
7
8 #ifndef VM_FLOW_GRAPH_COMPILER_H_
9 #error Include flow_graph_compiler.h instead of flow_graph_compiler_ia32.h.
10 #endif
11
12 namespace dart {
13
14 class Code;
15 class FlowGraph;
16 template <typename T> class GrowableArray;
17 class ParsedFunction;
18
// IA32-specific portion of the flow graph compiler: walks the flow graph in
// block order and emits machine code via the Assembler, recording the
// metadata (PC descriptors, deopt info, stack maps, exception handlers)
// that the VM needs alongside the generated Code object.
class FlowGraphCompiler : public ValueObject {
 private:
  // Per-basic-block compiler state. Currently only the assembler label that
  // serves as the block's jump target; zone-allocated so it lives as long as
  // the compilation itself.
  struct BlockInfo : public ZoneAllocated {
   public:
    BlockInfo() : label() { }
    Label label;
  };

 public:
  FlowGraphCompiler(Assembler* assembler,
                    const FlowGraph& flow_graph,
                    bool is_optimizing);

  ~FlowGraphCompiler();

  // Whether this architecture can keep 64-bit integer (mint) values unboxed.
  static bool SupportsUnboxedMints();

  // Accessors.
  Assembler* assembler() const { return assembler_; }
  const ParsedFunction& parsed_function() const { return parsed_function_; }
  const GrowableArray<BlockEntryInstr*>& block_order() const {
    return block_order_;
  }
  DescriptorList* pc_descriptors_list() const {
    return pc_descriptors_list_;
  }
  BlockEntryInstr* current_block() const { return current_block_; }
  void set_current_block(BlockEntryInstr* value) {
    current_block_ = value;
  }
  static bool CanOptimize();
  bool CanOptimizeFunction() const;

  bool is_optimizing() const { return is_optimizing_; }

  const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
  ParallelMoveResolver* parallel_move_resolver() {
    return &parallel_move_resolver_;
  }

  // Constructor is lightweight, major initialization work should occur here.
  // This makes it easier to measure time spent in the compiler.
  void InitCompiler();

  // Emits code for the whole graph: frame entry, all blocks, deferred code.
  void CompileGraph();

  // Emits code for each block in block_order().
  void VisitBlocks();

  // Bail out of the flow graph compiler. Does not return to the caller.
  void Bailout(const char* reason);

  // Loads a Double's value or a Smi (converted to double) from 'reg' into
  // the XMM register 'result'; jumps to 'not_double_or_smi' otherwise.
  void LoadDoubleOrSmiToXmm(XmmRegister result,
                            Register reg,
                            Register temp,
                            Label* not_double_or_smi);

  // Returns 'true' if code generation for this function is complete, i.e.,
  // no fall-through to regular code is needed.
  bool TryIntrinsify();

  // Call-emission helpers. Each records the PC descriptor / safepoint
  // information ('locs') needed for deoptimization and GC at the call site.
  void GenerateCallRuntime(intptr_t token_pos,
                           const RuntimeEntry& entry,
                           LocationSummary* locs);

  void GenerateCall(intptr_t token_pos,
                    const ExternalLabel* label,
                    PcDescriptors::Kind kind,
                    LocationSummary* locs);

  void GenerateDartCall(intptr_t deopt_id,
                        intptr_t token_pos,
                        const ExternalLabel* label,
                        PcDescriptors::Kind kind,
                        LocationSummary* locs);

  // Emits an assignability check of the value against 'dst_type';
  // 'dst_name' is used in the error message on failure.
  void GenerateAssertAssignable(intptr_t token_pos,
                                const AbstractType& dst_type,
                                const String& dst_name,
                                LocationSummary* locs);

  void GenerateInstanceOf(intptr_t token_pos,
                          const AbstractType& type,
                          bool negate_result,
                          LocationSummary* locs);

  void GenerateInstanceCall(intptr_t deopt_id,
                            intptr_t token_pos,
                            intptr_t argument_count,
                            const Array& argument_names,
                            LocationSummary* locs,
                            const ICData& ic_data);

  void GenerateStaticCall(intptr_t deopt_id,
                          intptr_t token_pos,
                          const Function& function,
                          intptr_t argument_count,
                          const Array& argument_names,
                          LocationSummary* locs);

  // Type-check fast paths keyed on the class id held in 'kClassIdReg'.
  void GenerateNumberTypeCheck(Register kClassIdReg,
                               const AbstractType& type,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateStringTypeCheck(Register kClassIdReg,
                               Label* is_instance_lbl,
                               Label* is_not_instance_lbl);
  void GenerateListTypeCheck(Register kClassIdReg,
                             Label* is_instance_lbl);

  void EmitComment(Instruction* instr);

  void EmitOptimizedInstanceCall(ExternalLabel* target_label,
                                 const ICData& ic_data,
                                 const Array& arguments_descriptor,
                                 intptr_t argument_count,
                                 intptr_t deopt_id,
                                 intptr_t token_pos,
                                 LocationSummary* locs);

  void EmitInstanceCall(ExternalLabel* target_label,
                        const ICData& ic_data,
                        const Array& arguments_descriptor,
                        intptr_t argument_count,
                        intptr_t deopt_id,
                        intptr_t token_pos,
                        LocationSummary* locs);

  void EmitMegamorphicInstanceCall(const ICData& ic_data,
                                   const Array& arguments_descriptor,
                                   intptr_t argument_count,
                                   intptr_t deopt_id,
                                   intptr_t token_pos,
                                   LocationSummary* locs);

  void EmitTestAndCall(const ICData& ic_data,
                       Register class_id_reg,
                       intptr_t arg_count,
                       const Array& arg_names,
                       Label* deopt,
                       intptr_t deopt_id,
                       intptr_t token_index,
                       LocationSummary* locs);

  void EmitDoubleCompareBranch(Condition true_condition,
                               XmmRegister left,
                               XmmRegister right,
                               BranchInstr* branch);
  void EmitDoubleCompareBool(Condition true_condition,
                             XmmRegister left,
                             XmmRegister right,
                             Register result);

  void EmitEqualityRegConstCompare(Register reg,
                                   const Object& obj,
                                   bool needs_number_check);
  void EmitEqualityRegRegCompare(Register left,
                                 Register right,
                                 bool needs_number_check);
  // Implement equality: if any of the arguments is null do identity check.
  // Fallthrough calls super equality.
  void EmitSuperEqualityCallPrologue(Register result, Label* skip_call);

  intptr_t StackSize() const;

  // Returns assembler label associated with the given block entry.
  Label* GetBlockLabel(BlockEntryInstr* block_entry) const;

  // Returns true if there is a next block after the current one in
  // the block order and if it is the given block.
  bool IsNextBlock(BlockEntryInstr* block_entry) const;

  void AddExceptionHandler(intptr_t try_index,
                           intptr_t outer_try_index,
                           intptr_t pc_offset,
                           const Array& handler_types);
  void AddCurrentDescriptor(PcDescriptors::Kind kind,
                            intptr_t deopt_id,
                            intptr_t token_pos);

  void RecordSafepoint(LocationSummary* locs);

  Label* AddDeoptStub(intptr_t deopt_id, DeoptReasonId reason);

  void AddDeoptIndexAtCall(intptr_t deopt_id, intptr_t token_pos);

  void AddSlowPathCode(SlowPathCode* slow_path);

  // Transfer the accumulated compilation metadata into 'code' once
  // assembly is complete.
  void FinalizeExceptionHandlers(const Code& code);
  void FinalizePcDescriptors(const Code& code);
  void FinalizeDeoptInfo(const Code& code);
  void FinalizeStackmaps(const Code& code);
  void FinalizeVarDescriptors(const Code& code);
  void FinalizeComments(const Code& code);
  void FinalizeStaticCallTargetsTable(const Code& code);

  const Class& double_class() const { return double_class_; }

  void SaveLiveRegisters(LocationSummary* locs);
  void RestoreLiveRegisters(LocationSummary* locs);

  // Returns true if the compiled function has a finally clause.
  bool HasFinally() const;

  // Try-index of the current block, or kInvalidTryIndex outside any block.
  intptr_t CurrentTryIndex() const {
    if (current_block_ == NULL) {
      return CatchClauseNode::kInvalidTryIndex;
    }
    return current_block_->try_index();
  }

  bool may_reoptimize() const { return may_reoptimize_; }

  // Offset of the first local slot relative to the frame pointer.
  static const int kLocalsOffsetFromFP = (-1 * kWordSize);

  static Condition FlipCondition(Condition condition);

  static bool EvaluateCondition(Condition condition, intptr_t l, intptr_t r);

  // Array/list element address computations.
  static intptr_t DataOffsetFor(intptr_t cid);
  static intptr_t ElementSizeFor(intptr_t cid);
  static FieldAddress ElementAddressForIntIndex(intptr_t cid,
                                                Register array,
                                                intptr_t offset);
  static FieldAddress ElementAddressForRegIndex(intptr_t cid,
                                                Register array,
                                                Register index);
  static Address ExternalElementAddressForIntIndex(intptr_t cid,
                                                   Register array,
                                                   intptr_t offset);
  static Address ExternalElementAddressForRegIndex(intptr_t cid,
                                                   Register array,
                                                   Register index);

 private:
  void EmitFrameEntry();

  void AddStaticCallTarget(const Function& function);

  void GenerateDeferredCode();

  void EmitInstructionPrologue(Instruction* instr);
  void EmitInstructionEpilogue(Instruction* instr);

  // Emit code to load a Value into register 'dst'.
  void LoadValue(Register dst, Value* value);

  void EmitStaticCall(const Function& function,
                      const Array& arguments_descriptor,
                      intptr_t argument_count,
                      intptr_t deopt_id,
                      intptr_t token_pos,
                      LocationSummary* locs);

  // Type checking helper methods.
  void CheckClassIds(Register class_id_reg,
                     const GrowableArray<intptr_t>& class_ids,
                     Label* is_instance_lbl,
                     Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateInlineInstanceof(intptr_t token_pos,
                                               const AbstractType& type,
                                               Label* is_instance_lbl,
                                               Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateInstantiatedTypeWithArgumentsTest(
      intptr_t token_pos,
      const AbstractType& dst_type,
      Label* is_instance_lbl,
      Label* is_not_instance_lbl);

  bool GenerateInstantiatedTypeNoArgumentsTest(intptr_t token_pos,
                                               const AbstractType& dst_type,
                                               Label* is_instance_lbl,
                                               Label* is_not_instance_lbl);

  RawSubtypeTestCache* GenerateUninstantiatedTypeTest(
      intptr_t token_pos,
      const AbstractType& dst_type,
      Label* is_instance_lbl,
      Label* is_not_instance_label);

  RawSubtypeTestCache* GenerateSubtype1TestCacheLookup(
      intptr_t token_pos,
      const Class& type_class,
      Label* is_instance_lbl,
      Label* is_not_instance_lbl);

  // Number of type arguments the subtype-test stub should consult.
  enum TypeTestStubKind {
    kTestTypeOneArg,
    kTestTypeTwoArgs,
    kTestTypeThreeArgs,
  };

  RawSubtypeTestCache* GenerateCallSubtypeTestStub(TypeTestStubKind test_kind,
                                                   Register instance_reg,
                                                   Register type_arguments_reg,
                                                   Register temp_reg,
                                                   Label* is_instance_lbl,
                                                   Label* is_not_instance_lbl);

  // Returns true if checking against this type is a direct class id comparison.
  bool TypeCheckAsClassEquality(const AbstractType& type);

  void GenerateBoolToJump(Register bool_reg, Label* is_true, Label* is_false);

  void CopyParameters();

  void GenerateInlinedGetter(intptr_t offset);
  void GenerateInlinedSetter(intptr_t offset);

  // Perform a greedy local register allocation. Consider all registers free.
  void AllocateRegistersLocally(Instruction* instr);

  // Map a block number in a forward iteration into the block number in the
  // corresponding reverse iteration. Used to obtain an index into
  // block_order for reverse iterations.
  intptr_t reverse_index(intptr_t index) const {
    return block_order_.length() - index - 1;
  }

  class Assembler* assembler_;
  const ParsedFunction& parsed_function_;
  const GrowableArray<BlockEntryInstr*>& block_order_;

  // Compiler specific per-block state. Indexed by postorder block number
  // for convenience. This is not the block's index in the block order,
  // which is reverse postorder.
  BlockEntryInstr* current_block_;
  ExceptionHandlerList* exception_handlers_list_;
  DescriptorList* pc_descriptors_list_;
  StackmapTableBuilder* stackmap_table_builder_;
  GrowableArray<BlockInfo*> block_info_;
  GrowableArray<CompilerDeoptInfo*> deopt_infos_;
  GrowableArray<SlowPathCode*> slow_path_code_;
  // Stores: [code offset, function, null(code)].
  const GrowableObjectArray& static_calls_target_table_;
  const bool is_optimizing_;
  // Set to true if optimized code has IC calls.
  bool may_reoptimize_;

  const Class& double_class_;

  ParallelMoveResolver parallel_move_resolver_;

  // Currently instructions generate deopt stubs internally by
  // calling AddDeoptStub. To communicate deoptimization environment
  // that should be used when deoptimizing we store it in this variable.
  // In future AddDeoptStub should be moved out of the instruction template.
  Environment* pending_deoptimization_env_;

  DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
};
372
373 } // namespace dart
374
375 #endif // VM_FLOW_GRAPH_COMPILER_IA32_H_
OLDNEW
« no previous file with comments | « runtime/vm/flow_graph_compiler_arm.cc ('k') | runtime/vm/flow_graph_compiler_ia32.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698