// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_DBC.
#if defined(TARGET_ARCH_DBC)

#include "vm/intermediate_language.h"

#include "vm/cpu.h"
#include "vm/compiler.h"
#include "vm/dart_entry.h"
#include "vm/flow_graph.h"
#include "vm/flow_graph_compiler.h"
#include "vm/flow_graph_range_analysis.h"
#include "vm/locations.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/simulator.h"
#include "vm/stack_frame.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"

#define __ compiler->assembler()->

namespace dart {

DECLARE_FLAG(bool, allow_absolute_addresses);
DECLARE_FLAG(bool, emit_edge_counters);
DECLARE_FLAG(int, optimization_counter_threshold);

// List of instructions that are still unimplemented by the DBC backend.
#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(Stop) \
  M(IndirectGoto) \
  M(LoadCodeUnits) \
  M(InstanceOf) \
  M(LoadUntagged) \
  M(AllocateUninitializedContext) \
  M(BinaryInt32Op) \
  M(UnarySmiOp) \
  M(UnaryDoubleOp) \
  M(SmiToDouble) \
  M(Int32ToDouble) \
  M(MintToDouble) \
  M(DoubleToInteger) \
  M(DoubleToSmi) \
  M(DoubleToDouble) \
  M(DoubleToFloat) \
  M(FloatToDouble) \
  M(UnboxedConstant) \
  M(CheckEitherNonSmi) \
  M(BinaryDoubleOp) \
  M(MathUnary) \
  M(MathMinMax) \
  M(Box) \
  M(Unbox) \
  M(BoxInt64) \
  M(CaseInsensitiveCompareUC16) \
  M(BinaryMintOp) \
  M(ShiftMintOp) \
  M(UnaryMintOp) \
  M(StringToCharCode) \
  M(StringFromCharCode) \
  M(InvokeMathCFunction) \
  M(MergedMath) \
  M(GuardFieldClass) \
  M(GuardFieldLength) \
  M(IfThenElse) \
  M(BinaryFloat32x4Op) \
  M(Simd32x4Shuffle) \
  M(Simd32x4ShuffleMix) \
  M(Simd32x4GetSignMask) \
  M(Float32x4Constructor) \
  M(Float32x4Zero) \
  M(Float32x4Splat) \
  M(Float32x4Comparison) \
  M(Float32x4MinMax) \
  M(Float32x4Scale) \
  M(Float32x4Sqrt) \
  M(Float32x4ZeroArg) \
  M(Float32x4Clamp) \
  M(Float32x4With) \
  M(Float32x4ToInt32x4) \
  M(Int32x4Constructor) \
  M(Int32x4BoolConstructor) \
  M(Int32x4GetFlag) \
  M(Int32x4Select) \
  M(Int32x4SetFlag) \
  M(Int32x4ToFloat32x4) \
  M(BinaryInt32x4Op) \
  M(TestCids) \
  M(BinaryFloat64x2Op) \
  M(Float64x2Zero) \
  M(Float64x2Constructor) \
  M(Float64x2Splat) \
  M(Float32x4ToFloat64x2) \
  M(Float64x2ToFloat32x4) \
  M(Simd64x2Shuffle) \
  M(Float64x2ZeroArg) \
  M(Float64x2OneArg) \
  M(ExtractNthOutput) \
  M(BinaryUint32Op) \
  M(ShiftUint32Op) \
  M(UnaryUint32Op) \
  M(UnboxedIntConverter) \
  M(GrowRegExpStack) \
  M(BoxInteger32) \
  M(UnboxInteger32) \
  M(CheckedSmiOp) \
  M(CheckArrayBound) \
  M(CheckSmi) \
  M(LoadClassId) \
  M(CheckClassId) \
  M(CheckClass) \
  M(BinarySmiOp) \
  M(TestSmi) \
  M(RelationalOp) \
  M(EqualityCompare) \
  M(LoadIndexed)

// Location summaries are not actually used by the unoptimizing DBC compiler
// because it does not allocate any registers.
static LocationSummary* CreateLocationSummary(Zone* zone,
                                              intptr_t num_inputs,
                                              bool has_result) {
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new(zone) LocationSummary(
      zone, num_inputs, kNumTemps, LocationSummary::kNoCall);
  for (intptr_t i = 0; i < num_inputs; i++) {
  [zra, 2016/04/14 18:27:48] {} please
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:42] Done.
    locs->set_in(i, Location::RequiresRegister());
  }
  if (has_result) {
    locs->set_out(0, Location::RequiresRegister());
  }
  return locs;
}


#define DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    return CreateLocationSummary(zone, In, Out); \
  } \

#define EMIT_NATIVE_CODE(Name, In, Out) \
  DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out); \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) \

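// Illustrative expansion (editor's gloss, not part of the original patch):
// EMIT_NATIVE_CODE(Return, 1, false), as used further down, expands to a
// location-summary definition plus the EmitNativeCode header, so each use
// site supplies only the function body:
//
//   LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt)
//       const {
//     return CreateLocationSummary(zone, 1, false);
//   }
//   void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler)
//   { ... body written at the use site ... }
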
#define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    UNIMPLEMENTED(); \
    return NULL; \
  } \

#define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
    UNIMPLEMENTED(); \
  }

#define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \
  void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \
    UNIMPLEMENTED(); \
  } \
  Condition Name##Instr::EmitComparisonCode(FlowGraphCompiler*, \
                                            BranchLabels) { \
    UNIMPLEMENTED(); \
    return EQ; \
  }

#define DEFINE_UNIMPLEMENTED(Name) \
  DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name)

FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)

#undef DEFINE_UNIMPLEMENTED
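
// Illustrative expansion (editor's gloss): the X-macro application above
// stamps out the two UNIMPLEMENTED stubs for every entry in the list; for
// example, M(Stop) yields:
//
//   LocationSummary* StopInstr::MakeLocationSummary(Zone* zone, bool opt)
//       const {
//     UNIMPLEMENTED();
//     return NULL;
//   }
//   void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
//     UNIMPLEMENTED();
//   }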

DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)


DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, true);


EMIT_NATIVE_CODE(AssertBoolean, 1, true) {
  __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(
    Zone* zone, bool optimizing) const {
  return MakeCallSummary(zone);
}


void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  UNIMPLEMENTED();
}


EMIT_NATIVE_CODE(CheckStackOverflow, 0, false) {
  __ CheckStack();
  compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(PushArgument, 1, false) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
}


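// Editorial note on the index arithmetic below: local().index() is never 0
// here (asserted), and the positive and negative index ranges (parameters
// and locals, respectively, in the VM's numbering) are folded by
// (index > 0) ? -index : -index - 1 into the contiguous FP-relative operand
// encoding that the Push/StoreLocal/PopLocal bytecodes expect. The same
// conversion reappears in CatchBlockEntry further down.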
EMIT_NATIVE_CODE(LoadLocal, 0, false) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
  __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1));
}


EMIT_NATIVE_CODE(StoreLocal, 0, false) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
  if (HasTemp()) {
    __ StoreLocal(
        (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
  } else {
    __ PopLocal(
        (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
  }
}


EMIT_NATIVE_CODE(Constant, 0, true) {
  const intptr_t kidx = __ object_pool_wrapper().FindObject(value());
  if (compiler->is_optimizing()) {
    __ LoadConstant(locs()->out(0).reg(), kidx);
  } else {
    __ PushConstant(kidx);
  }
}


EMIT_NATIVE_CODE(Return, 1, false) {
  __ ReturnTOS();
}


EMIT_NATIVE_CODE(StoreStaticField, 1, false) {
  const intptr_t kidx = __ object_pool_wrapper().FindObject(field());
  __ StoreStaticTOS(kidx);
}


EMIT_NATIVE_CODE(LoadStaticField, 1, true) {
  const intptr_t kidx = __ object_pool_wrapper().FindObject(StaticField());
  __ PushStatic(kidx);
}


EMIT_NATIVE_CODE(InitStaticField, 0, false) {
  ASSERT(!compiler->is_optimizing());
  __ InitStaticTOS();
}


EMIT_NATIVE_CODE(ClosureCall, 0, false) {
  intptr_t argument_count = ArgumentCount();
  const Array& arguments_descriptor =
      Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                 argument_names()));
  const intptr_t argdesc_kidx =
      compiler->assembler()->AddConstant(arguments_descriptor);
  __ StaticCall(argument_count, argdesc_kidx);

  compiler->RecordSafepoint(locs());
  // Marks either the continuation point in unoptimized code or the
  // deoptimization point in optimized code, after the call.
  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
  if (compiler->is_optimizing()) {
    compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
  }
  // Add a deoptimization continuation point after the call and before the
  // arguments are removed. In optimized code this descriptor is needed for
  // exception handling.
  compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                                 deopt_id_after,
                                 token_pos());
}


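// Case analysis for the helper below (editorial gloss): the preceding
// comparison bytecode decides whether the next instruction executes, so
// this helper only has to place the jumps:
//   - fall_through == false_label: emit one jump to the true label and let
//     the false case fall through;
//   - fall_through == true_label: emit one jump to the false label and let
//     the true case fall through;
//   - neither: emit a jump to the false label followed by an unconditional
//     jump to the true label.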
static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                  Condition true_condition,
                                  BranchLabels labels) {
  if (labels.fall_through == labels.false_label) {
    // If the next block is the false successor, fall through to it.
    __ Jump(labels.true_label);
  } else {
    // If the next block is not the false successor, branch to it.
    __ Jump(labels.false_label);

    // Fall through or jump to the true successor.
    if (labels.fall_through != labels.true_label) {
      __ Jump(labels.true_label);
    }
  }
}


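// Editorial gloss on the opcode choice below: the If*Strict*TOS bytecodes
// are assumed to let the next instruction execute only when their condition
// holds, and EmitBranchOnCondition emits the jump to whichever label is not
// the fall-through. The opcode is therefore picked so that its condition
// means "take that jump": e.g. for kEQ_STRICT with the false label as the
// fall-through, eq_op guards the jump to the true label.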
Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                                 BranchLabels labels) {
  ASSERT((kind() == Token::kNE_STRICT) ||
         (kind() == Token::kEQ_STRICT));
  const Bytecode::Opcode eq_op = needs_number_check() ?
      Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
  const Bytecode::Opcode ne_op = needs_number_check() ?
      Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;

  if (kind() == Token::kEQ_STRICT) {
    __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
  } else {
    __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
  }

  if (needs_number_check() && token_pos().IsReal()) {
    compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                   Thread::kNoDeoptId,
                                   token_pos());
  }
  return EQ;
}


void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                        BranchInstr* branch) {
  ASSERT((kind() == Token::kEQ_STRICT) ||
         (kind() == Token::kNE_STRICT));
  [zra, 2016/04/14 18:27:48] Add parens and format as above?
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:41] Done.

  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


EMIT_NATIVE_CODE(StrictCompare, 2, true) {
  ASSERT((kind() == Token::kEQ_STRICT) ||
         (kind() == Token::kNE_STRICT));
  [zra, 2016/04/14 18:27:48] ditto
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:42] Done.

  Label is_true, is_false;
  BranchLabels labels = { &is_true, &is_false, &is_false };
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
  Label done;
  __ Bind(&is_false);
  __ PushConstant(Bool::False());
  __ Jump(&done);
  __ Bind(&is_true);
  __ PushConstant(Bool::True());
  __ Bind(&done);
}


LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone,
                                                  bool opt) const {
  comparison()->InitializeLocationSummary(zone, opt);
  // Branches don't produce a result.
  comparison()->locs()->set_out(0, Location::NoLocation());
  return comparison()->locs();
}


void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  comparison()->EmitBranchCode(compiler, this);
}


EMIT_NATIVE_CODE(Goto, 0, false) {
  if (HasParallelMove()) {
    compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
  }
  // We can fall through if the successor is the next block in the list.
  // Otherwise, we need a jump.
  if (!compiler->CanFallThroughTo(successor())) {
    __ Jump(compiler->GetJumpLabel(successor()));
  }
}


EMIT_NATIVE_CODE(CreateArray, 2, true) {
  __ CreateArrayTOS();
}


EMIT_NATIVE_CODE(StoreIndexed, 3, false) {
  ASSERT(class_id() == kArrayCid);
  __ StoreIndexedTOS();
}


EMIT_NATIVE_CODE(StringInterpolate, 0, false) {
  const intptr_t kArgumentCount = 1;
  const Array& arguments_descriptor = Array::Handle(
      ArgumentsDescriptor::New(kArgumentCount, Object::null_array()));
  __ PushConstant(CallFunction());
  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
  __ StaticCall(kArgumentCount, argdesc_kidx);
}


EMIT_NATIVE_CODE(NativeCall, 0, false) {
  SetupNative();

  const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());

  ASSERT(!link_lazily());
  const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
  const intptr_t target_kidx =
      __ object_pool_wrapper().FindImmediate(label.address());
  const intptr_t argc_tag_kidx =
      __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
  __ PushConstant(target_kidx);
  __ PushConstant(argc_tag_kidx);
  if (is_bootstrap_native()) {
    __ NativeBootstrapCall();
  } else {
    __ NativeCall();
  }
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(AllocateObject, 0, true) {
  if (ArgumentCount() == 1) {
    __ PushConstant(cls());
    __ AllocateT();
    compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                   Thread::kNoDeoptId,
                                   token_pos());
  } else {
    const intptr_t kidx = __ AddConstant(cls());
    __ Allocate(kidx);
    compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                   Thread::kNoDeoptId,
                                   token_pos());
  }
}


EMIT_NATIVE_CODE(StoreInstanceField, 2, false) {
  ASSERT(!HasTemp());
  ASSERT(offset_in_bytes() % kWordSize == 0);
  if (compiler->is_optimizing()) {
    const Register value = locs()->in(1).reg();
    const Register instance = locs()->in(0).reg();
    __ StoreField(instance, offset_in_bytes() / kWordSize, value);
  } else {
    __ StoreFieldTOS(offset_in_bytes() / kWordSize);
  }
}


EMIT_NATIVE_CODE(LoadField, 1, true) {
  ASSERT(offset_in_bytes() % kWordSize == 0);
  __ LoadFieldTOS(offset_in_bytes() / kWordSize);
}


EMIT_NATIVE_CODE(BooleanNegate, 1, true) {
  __ BooleanNegateTOS();
}


EMIT_NATIVE_CODE(AllocateContext, 0, false) {
  __ AllocateContext(num_context_variables());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(CloneContext, 0, false) {
  __ CloneContext();
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(CatchBlockEntry, 0, false) {
  __ Bind(compiler->GetJumpLabel(this));
  compiler->AddExceptionHandler(catch_try_index(),
                                try_index(),
                                compiler->assembler()->CodeSize(),
                                catch_handler_types_,
                                needs_stacktrace());
  __ MoveSpecial(-exception_var().index() - 1,
                 Simulator::kExceptionSpecialIndex);
  __ MoveSpecial(-stacktrace_var().index() - 1,
                 Simulator::kStacktraceSpecialIndex);
  __ SetFrame(compiler->StackSize());
}


EMIT_NATIVE_CODE(Throw, 0, false) {
  __ Throw(0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
  __ Trap();
}


EMIT_NATIVE_CODE(ReThrow, 0, false) {
  compiler->SetNeedsStacktrace(catch_try_index());
  __ Throw(1);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
  __ Trap();
}


EMIT_NATIVE_CODE(InstantiateType, 1, true) {
  __ InstantiateType(__ AddConstant(type()));
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


EMIT_NATIVE_CODE(InstantiateTypeArguments, 1, true) {
  __ InstantiateTypeArgumentsTOS(
      type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
      __ AddConstant(type_arguments()));
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                 deopt_id(),
                                 token_pos());
}


void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ DebugStep();
  compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos());
}


void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
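  // Nothing to emit: on DBC the function's frame has already been set up
  // by the time control reaches the graph entry, so no code is generated
  // here. (Editorial suggestion addressing the review note below; the
  // wording that landed in the final patch may differ.)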
  [zra, 2016/04/14 18:27:48] Maybe comment about why this is empty.
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:41] Done.
}


LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
  LocationSummary* result = new(zone) LocationSummary(
      zone, 0, 0, LocationSummary::kCall);
  result->set_out(0, Location::RequiresRegister());
  return result;
}


CompileType BinaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


CompileType ShiftUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


CompileType UnaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}


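// Editorial note: Mints are 64-bit integers, so 63 is the largest shift
// count that is in range; a shift needs an explicit check unless range
// analysis has proven that the count lies within [0, kMintShiftCountLimit].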
static const intptr_t kMintShiftCountLimit = 63;


bool ShiftMintOpInstr::has_shift_count_check() const {
  return !RangeUtils::IsWithin(
      right()->definition()->range(), 0, kMintShiftCountLimit);
}


CompileType LoadIndexedInstr::ComputeType() const {
  switch (class_id_) {
    case kArrayCid:
    case kImmutableArrayCid:
      return CompileType::Dynamic();

    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return CompileType::FromCid(kDoubleCid);
    case kTypedDataFloat32x4ArrayCid:
      return CompileType::FromCid(kFloat32x4Cid);
    case kTypedDataInt32x4ArrayCid:
      return CompileType::FromCid(kInt32x4Cid);
    case kTypedDataFloat64x2ArrayCid:
      return CompileType::FromCid(kFloat64x2Cid);

    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
      return CompileType::FromCid(kSmiCid);

    case kTypedDataInt32ArrayCid:
    case kTypedDataUint32ArrayCid:
      return CompileType::Int();

    default:
      UNREACHABLE();
      return CompileType::Dynamic();
  }
}


Representation LoadIndexedInstr::representation() const {
  switch (class_id_) {
    case kArrayCid:
    case kImmutableArrayCid:
    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
      return kTagged;
    case kTypedDataInt32ArrayCid:
      return kUnboxedInt32;
    case kTypedDataUint32ArrayCid:
      return kUnboxedUint32;
    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return kUnboxedDouble;
    case kTypedDataInt32x4ArrayCid:
      return kUnboxedInt32x4;
    case kTypedDataFloat32x4ArrayCid:
      return kUnboxedFloat32x4;
    case kTypedDataFloat64x2ArrayCid:
      return kUnboxedFloat64x2;
    default:
      UNREACHABLE();
      return kTagged;
  }
}


Representation StoreIndexedInstr::RequiredInputRepresentation(
    intptr_t idx) const {
  // Array can be a Dart object or a pointer to external data.
  if (idx == 0) return kNoRepresentation;  // Flexible input representation.
  [zra, 2016/04/14 18:27:48] {} please
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:42] This code matches code in all other backends.
  if (idx == 1) return kTagged;  // Index is a smi.
  [zra, 2016/04/14 18:27:48] ditto
  [Vyacheslav Egorov (Google), 2016/04/18 15:56:42] This code matches code in all other backends.
  ASSERT(idx == 2);
  switch (class_id_) {
    case kArrayCid:
    case kOneByteStringCid:
    case kTypedDataInt8ArrayCid:
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
    case kTypedDataInt16ArrayCid:
    case kTypedDataUint16ArrayCid:
      return kTagged;
    case kTypedDataInt32ArrayCid:
      return kUnboxedInt32;
    case kTypedDataUint32ArrayCid:
      return kUnboxedUint32;
    case kTypedDataFloat32ArrayCid:
    case kTypedDataFloat64ArrayCid:
      return kUnboxedDouble;
    case kTypedDataFloat32x4ArrayCid:
      return kUnboxedFloat32x4;
    case kTypedDataInt32x4ArrayCid:
      return kUnboxedInt32x4;
    case kTypedDataFloat64x2ArrayCid:
      return kUnboxedFloat64x2;
    default:
      UNREACHABLE();
      return kTagged;
  }
}

}  // namespace dart

#endif  // defined TARGET_ARCH_DBC