Chromium Code Reviews

Side by Side Diff: runtime/vm/intermediate_language_dbc.cc

Issue 1992963002: Enable optimizer pipeline for DBC. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 7 months ago
OLD | NEW
1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC.
6 #if defined(TARGET_ARCH_DBC) 6 #if defined(TARGET_ARCH_DBC)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/cpu.h" 10 #include "vm/cpu.h"
(...skipping 101 matching lines...)
112 M(CheckClassId) \ 112 M(CheckClassId) \
113 M(CheckClass) \ 113 M(CheckClass) \
114 M(BinarySmiOp) \ 114 M(BinarySmiOp) \
115 M(TestSmi) \ 115 M(TestSmi) \
116 M(RelationalOp) \ 116 M(RelationalOp) \
117 M(EqualityCompare) \ 117 M(EqualityCompare) \
118 M(LoadIndexed) 118 M(LoadIndexed)
119 119
120 // Location summaries actually are not used by the unoptimizing DBC compiler 120 // Location summaries actually are not used by the unoptimizing DBC compiler
121 // because we don't allocate any registers. 121 // because we don't allocate any registers.
122 static LocationSummary* CreateLocationSummary(Zone* zone, 122 static LocationSummary* CreateLocationSummary(
123 intptr_t num_inputs, 123 Zone* zone,
124 bool has_result) { 124 intptr_t num_inputs,
125 Location output = Location::NoLocation(),
126 LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall) {
125 const intptr_t kNumTemps = 0; 127 const intptr_t kNumTemps = 0;
126 LocationSummary* locs = new(zone) LocationSummary( 128 LocationSummary* locs = new(zone) LocationSummary(
127 zone, num_inputs, kNumTemps, LocationSummary::kNoCall); 129 zone, num_inputs, kNumTemps, contains_call);
128 for (intptr_t i = 0; i < num_inputs; i++) { 130 for (intptr_t i = 0; i < num_inputs; i++) {
129 locs->set_in(i, Location::RequiresRegister()); 131 locs->set_in(i, (contains_call == LocationSummary::kNoCall) ?
132 Location::RequiresRegister() : Location::RegisterLocation(i));
130 } 133 }
131 if (has_result) { 134 if (!output.IsInvalid()) {
132 locs->set_out(0, Location::RequiresRegister()); 135 // For instructions that call we default to returning result in R0.
136 locs->set_out(0, output);
133 } 137 }
134 return locs; 138 return locs;
135 } 139 }
136 140
137 141
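Note: the reworked CreateLocationSummary takes the output location and a
call marker as defaulted parameters, so the common "no result, no call"
case stays a two-argument call. A sketch of the three shapes call sites
in this patch take:

    // No result, no call (e.g. PushArgument):
    CreateLocationSummary(zone, 1);
    // Register-allocated result, no call (e.g. LoadClassId):
    CreateLocationSummary(zone, 1, Location::RequiresRegister());
    // Result fixed in register 0 across a call (e.g. ClosureCall); inputs
    // also get fixed registers in the kCall case:
    CreateLocationSummary(zone, 1, Location::RegisterLocation(0),
                          LocationSummary::kCall);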
138 #define DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out) \ 142 #define DEFINE_MAKE_LOCATION_SUMMARY(Name, ...) \
139 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \ 143 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
140 const { \ 144 const { \
141 return CreateLocationSummary(zone, In, Out); \ 145 return CreateLocationSummary(zone, __VA_ARGS__); \
142 } \ 146 } \
143 147
144 #define EMIT_NATIVE_CODE(Name, In, Out) \ 148 #define EMIT_NATIVE_CODE(Name, ...) \
145 DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out); \ 149 DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \
146 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) \ 150 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) \
147 151
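Note: for reference, the two variadic macros expand roughly as follows,
shown for LoadClassId from this patch (whitespace approximate):

    LocationSummary* LoadClassIdInstr::MakeLocationSummary(
        Zone* zone, bool opt) const {
      return CreateLocationSummary(zone, 1, Location::RequiresRegister());
    }
    void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler)
    // ... followed by the body written at the EMIT_NATIVE_CODE call site.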
148 #define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \ 152 #define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
149 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \ 153 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
150 const { \ 154 const { \
151 UNIMPLEMENTED(); \ 155 if (!opt) UNIMPLEMENTED(); \
152 return NULL; \ 156 return NULL; \
153 } \ 157 } \
154 158
155 #define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \ 159 #define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
156 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \ 160 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
157 UNIMPLEMENTED(); \ 161 UNIMPLEMENTED(); \
158 } 162 }
159 163
160 #define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \ 164 #define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \
161 void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \ 165 void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \
(...skipping 12 matching lines...)
174 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED) 178 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)
175 179
176 #undef DEFINE_UNIMPLEMENTED 180 #undef DEFINE_UNIMPLEMENTED
177 181
178 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids) 182 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
179 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi) 183 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
180 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp) 184 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
181 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare) 185 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)
182 186
183 187
184 DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, true); 188 DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, Location::SameAsFirstInput());
185 189
186 190
187 EMIT_NATIVE_CODE(AssertBoolean, 1, true) { 191 EMIT_NATIVE_CODE(AssertBoolean,
192 1, Location::SameAsFirstInput(),
193 LocationSummary::kCall) {
194 if (compiler->is_optimizing()) {
195 __ Push(locs()->in(0).reg());
196 }
188 __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0); 197 __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
198 compiler->RecordSafepoint(locs());
189 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 199 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
190 deopt_id(), 200 deopt_id(),
191 token_pos()); 201 token_pos());
202 if (compiler->is_optimizing()) {
203 __ Drop1();
204 }
192 } 205 }
193 206
194 207
195 LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(Zone* zone, 208 LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(
196 bool optimizing) const { 209 Zone* zone, bool optimizing) const {
197 return MakeCallSummary(zone); 210 return MakeCallSummary(zone);
198 } 211 }
199 212
200 213
201 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 214 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
202 UNIMPLEMENTED(); 215 compiler->Bailout(ToCString());
203 } 216 }
204 217
205 218
206 EMIT_NATIVE_CODE(CheckStackOverflow, 0, false) { 219 EMIT_NATIVE_CODE(CheckStackOverflow,
220 0, Location::NoLocation(),
221 LocationSummary::kCall) {
207 __ CheckStack(); 222 __ CheckStack();
223 compiler->RecordSafepoint(locs());
208 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 224 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
209 Thread::kNoDeoptId, 225 Thread::kNoDeoptId,
210 token_pos()); 226 token_pos());
211 } 227 }
212 228
213 229
214 EMIT_NATIVE_CODE(PushArgument, 1, false) { 230 EMIT_NATIVE_CODE(PushArgument, 1) {
215 if (compiler->is_optimizing()) { 231 if (compiler->is_optimizing()) {
216 __ Push(locs()->in(0).reg()); 232 __ Push(locs()->in(0).reg());
217 } 233 }
218 } 234 }
219 235
220 236
221 EMIT_NATIVE_CODE(LoadLocal, 0, false) { 237 EMIT_NATIVE_CODE(LoadLocal, 0) {
222 ASSERT(!compiler->is_optimizing()); 238 ASSERT(!compiler->is_optimizing());
223 ASSERT(local().index() != 0); 239 ASSERT(local().index() != 0);
224 __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1)); 240 __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1));
225 } 241 }
226 242
227 243
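Note: the index arithmetic shared by LoadLocal above and StoreLocal below
converts a variable's frame index into the interpreter's operand. A
hypothetical helper (not part of this patch) expressing the same mapping:

    static intptr_t LocalOperand(const LocalVariable& local) {
      // Positive frame indices (presumably parameters) map to negative
      // operands; negative indices (presumably stack locals) map to
      // non-negative ones. Index 0 is excluded by the asserts above.
      const intptr_t index = local.index();
      return (index > 0) ? -index : (-index - 1);
    }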
228 EMIT_NATIVE_CODE(StoreLocal, 0, false) { 244 EMIT_NATIVE_CODE(StoreLocal, 0) {
229 ASSERT(!compiler->is_optimizing()); 245 ASSERT(!compiler->is_optimizing());
230 ASSERT(local().index() != 0); 246 ASSERT(local().index() != 0);
231 if (HasTemp()) { 247 if (HasTemp()) {
232 __ StoreLocal( 248 __ StoreLocal(
233 (local().index() > 0) ? (-local().index()) : (-local().index() - 1)); 249 (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
234 } else { 250 } else {
235 __ PopLocal( 251 __ PopLocal(
236 (local().index() > 0) ? (-local().index()) : (-local().index() - 1)); 252 (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
237 } 253 }
238 } 254 }
239 255
240 256
241 EMIT_NATIVE_CODE(LoadClassId, 1, true) { 257 EMIT_NATIVE_CODE(LoadClassId, 1, Location::RequiresRegister()) {
242 if (compiler->is_optimizing()) { 258 if (compiler->is_optimizing()) {
243 __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg()); 259 __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg());
244 } else { 260 } else {
245 __ LoadClassIdTOS(); 261 __ LoadClassIdTOS();
246 } 262 }
247 } 263 }
248 264
249 265
250 EMIT_NATIVE_CODE(Constant, 0, true) { 266 EMIT_NATIVE_CODE(Constant, 0, Location::RequiresRegister()) {
251 const intptr_t kidx = __ AddConstant(value());
252 if (compiler->is_optimizing()) { 267 if (compiler->is_optimizing()) {
253 __ LoadConstant(locs()->out(0).reg(), kidx); 268 __ LoadConstant(locs()->out(0).reg(), value());
254 } else { 269 } else {
255 __ PushConstant(kidx); 270 __ PushConstant(value());
256 } 271 }
257 } 272 }
258 273
259 274
260 EMIT_NATIVE_CODE(Return, 1, false) { 275 EMIT_NATIVE_CODE(Return, 1) {
261 __ ReturnTOS(); 276 if (compiler->is_optimizing()) {
277 __ Return(locs()->in(0).reg());
278 } else {
279 __ ReturnTOS();
280 }
262 } 281 }
263 282
264 283
265 EMIT_NATIVE_CODE(StoreStaticField, 1, false) { 284 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(
266 const intptr_t kidx = __ AddConstant(field()); 285 Zone* zone, bool opt) const {
267 __ StoreStaticTOS(kidx); 286 const intptr_t kNumInputs = 1;
287 const intptr_t kNumTemps = 1;
288 LocationSummary* locs = new(zone) LocationSummary(
289 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
290 for (intptr_t i = 0; i < kNumInputs; i++) {
291 locs->set_in(i, Location::RequiresRegister());
292 }
293 for (intptr_t i = 0; i < kNumTemps; i++) {
294 locs->set_temp(i, Location::RequiresRegister());
295 }
296 return locs;
268 } 297 }
269 298
270 299
271 EMIT_NATIVE_CODE(LoadStaticField, 1, true) { 300 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
272 const intptr_t kidx = __ AddConstant(StaticField()); 301 if (compiler->is_optimizing()) {
273 __ PushStatic(kidx); 302 __ LoadConstant(locs()->temp(0).reg(),
303 Field::ZoneHandle(field().Original()));
304 __ StoreField(locs()->temp(0).reg(),
305 Field::static_value_offset() / kWordSize,
306 locs()->in(0).reg());
307 } else {
308 const intptr_t kidx = __ AddConstant(field());
309 __ StoreStaticTOS(kidx);
310 }
274 } 311 }
275 312
276 313
277 EMIT_NATIVE_CODE(InitStaticField, 0, false) { 314 EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) {
315 if (compiler->is_optimizing()) {
316 __ LoadField(locs()->out(0).reg(),
317 locs()->in(0).reg(),
318 Field::static_value_offset() / kWordSize);
319 } else {
320 const intptr_t kidx = __ AddConstant(StaticField());
321 __ PushStatic(kidx);
322 }
323 }
324
325
326 EMIT_NATIVE_CODE(InitStaticField, 0) {
278 ASSERT(!compiler->is_optimizing()); 327 ASSERT(!compiler->is_optimizing());
279 __ InitStaticTOS(); 328 __ InitStaticTOS();
280 } 329 }
281 330
282 331
283 EMIT_NATIVE_CODE(ClosureCall, 0, false) { 332 EMIT_NATIVE_CODE(ClosureCall,
333 1,
334 Location::RegisterLocation(0),
335 LocationSummary::kCall) {
336 if (compiler->is_optimizing()) {
337 __ Push(locs()->in(0).reg());
338 }
339
284 intptr_t argument_count = ArgumentCount(); 340 intptr_t argument_count = ArgumentCount();
285 const Array& arguments_descriptor = 341 const Array& arguments_descriptor =
286 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, 342 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
287 argument_names())); 343 argument_names()));
288 const intptr_t argdesc_kidx = 344 const intptr_t argdesc_kidx =
289 compiler->assembler()->AddConstant(arguments_descriptor); 345 compiler->assembler()->AddConstant(arguments_descriptor);
290 __ StaticCall(argument_count, argdesc_kidx); 346 __ StaticCall(argument_count, argdesc_kidx);
347 compiler->RecordAfterCall(this);
291 348
292 compiler->RecordSafepoint(locs());
293 // Marks either the continuation point in unoptimized code or the
294 // deoptimization point in optimized code, after call.
295 const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
296 if (compiler->is_optimizing()) { 349 if (compiler->is_optimizing()) {
297 compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos()); 350 __ PopLocal(locs()->out(0).reg());
298 } 351 }
299 // Add deoptimization continuation point after the call and before the
300 // arguments are removed.
301 // In optimized code this descriptor is needed for exception handling.
302 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
303 deopt_id_after,
304 token_pos());
305 } 352 }
306 353
307 354
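Note: compiler->RecordAfterCall(this) appears to fold together the
after-call bookkeeping the old code spelled out inline. A sketch of what
such a helper would contain, reconstructed from the deleted lines above
(an assumption, not the actual implementation):

    void FlowGraphCompiler::RecordAfterCall(Instruction* instr) {
      RecordSafepoint(instr->locs());
      // Continuation point in unoptimized code, deoptimization point in
      // optimized code, placed after the call and before the arguments
      // are removed; in optimized code it is needed for exception handling.
      const intptr_t deopt_id_after = Thread::ToDeoptAfter(instr->deopt_id());
      if (is_optimizing()) {
        AddDeoptIndexAtCall(deopt_id_after, instr->token_pos());
      }
      AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                           deopt_id_after,
                           instr->token_pos());
    }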
308 static void EmitBranchOnCondition(FlowGraphCompiler* compiler, 355 static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
309 Condition true_condition, 356 Condition true_condition,
310 BranchLabels labels) { 357 BranchLabels labels) {
311 if (labels.fall_through == labels.false_label) { 358 if (labels.fall_through == labels.false_label) {
312 // If the next block is the false successor, fall through to it. 359 // If the next block is the false successor, fall through to it.
313 __ Jump(labels.true_label); 360 __ Jump(labels.true_label);
314 } else { 361 } else {
315 // If the next block is not the false successor, branch to it. 362 // If the next block is not the false successor, branch to it.
316 __ Jump(labels.false_label); 363 __ Jump(labels.false_label);
317 364
318 // Fall through or jump to the true successor. 365 // Fall through or jump to the true successor.
319 if (labels.fall_through != labels.true_label) { 366 if (labels.fall_through != labels.true_label) {
320 __ Jump(labels.true_label); 367 __ Jump(labels.true_label);
321 } 368 }
322 } 369 }
323 } 370 }
324 371
325 372
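Note: EmitBranchOnCondition only lays out the unconditional jumps; the
conditional skip comes from the If* bytecode emitted by the preceding
comparison (which picks its sense from the same fall-through test). The
three block layouts it handles:

    // fall_through == false_label: jump to the true successor only.
    __ Jump(labels.true_label);
    // fall_through == true_label: jump to the false successor only.
    __ Jump(labels.false_label);
    // Neither successor falls through: jump to both.
    __ Jump(labels.false_label);
    __ Jump(labels.true_label);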
326 Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, 373 Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
327 BranchLabels labels) { 374 BranchLabels labels) {
328 ASSERT((kind() == Token::kNE_STRICT) || 375 ASSERT((kind() == Token::kNE_STRICT) ||
329 (kind() == Token::kEQ_STRICT)); 376 (kind() == Token::kEQ_STRICT));
330 const Bytecode::Opcode eq_op = needs_number_check() ?
331 Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
332 const Bytecode::Opcode ne_op = needs_number_check() ?
333 Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
334 377
335 if (kind() == Token::kEQ_STRICT) { 378 if (!compiler->is_optimizing()) {
336 __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op); 379 const Bytecode::Opcode eq_op = needs_number_check() ?
380 Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
381 const Bytecode::Opcode ne_op = needs_number_check() ?
382 Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
383
384 if (kind() == Token::kEQ_STRICT) {
385 __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
386 } else {
387 __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
388 }
337 } else { 389 } else {
338 __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op); 390 const Bytecode::Opcode eq_op = needs_number_check() ?
391 Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
392 const Bytecode::Opcode ne_op = needs_number_check() ?
393 Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
394
395 if (kind() == Token::kEQ_STRICT) {
396 __ Emit(Bytecode::Encode(
397 (labels.fall_through == labels.false_label) ? eq_op : ne_op,
398 locs()->in(0).reg(),
399 locs()->in(1).reg()));
400 } else {
401 __ Emit(Bytecode::Encode(
402 (labels.fall_through == labels.false_label) ? ne_op : eq_op,
403 locs()->in(0).reg(),
404 locs()->in(1).reg()));
405 }
339 } 406 }
340 407
341 if (needs_number_check() && token_pos().IsReal()) { 408 if (needs_number_check() && token_pos().IsReal()) {
409 compiler->RecordSafepoint(locs());
342 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, 410 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
343 Thread::kNoDeoptId, 411 Thread::kNoDeoptId,
344 token_pos()); 412 token_pos());
345 } 413 }
346 return EQ; 414 return EQ;
347 } 415 }
348 416
349 417
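Note: opcode selection in EmitComparisonCode, summarized. The TOS variants
take their operands from the expression stack (unoptimized code); the
non-TOS variants encode two operand registers via Bytecode::Encode
(optimized code):

    unoptimized, no number check:   kIfEqStrictTOS    / kIfNeStrictTOS
    unoptimized, with number check: kIfEqStrictNumTOS / kIfNeStrictNumTOS
    optimized,   no number check:   kIfEqStrict       / kIfNeStrict
    optimized,   with number check: kIfEqStrictNum    / kIfNeStrictNum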
350 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, 418 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
351 BranchInstr* branch) { 419 BranchInstr* branch) {
352 ASSERT((kind() == Token::kEQ_STRICT) || 420 ASSERT((kind() == Token::kEQ_STRICT) ||
353 (kind() == Token::kNE_STRICT)); 421 (kind() == Token::kNE_STRICT));
354 422
355 BranchLabels labels = compiler->CreateBranchLabels(branch); 423 BranchLabels labels = compiler->CreateBranchLabels(branch);
356 Condition true_condition = EmitComparisonCode(compiler, labels); 424 Condition true_condition = EmitComparisonCode(compiler, labels);
357 EmitBranchOnCondition(compiler, true_condition, labels); 425 EmitBranchOnCondition(compiler, true_condition, labels);
358 } 426 }
359 427
360 428
361 EMIT_NATIVE_CODE(StrictCompare, 2, true) { 429 EMIT_NATIVE_CODE(StrictCompare,
430 2,
431 Location::RequiresRegister(),
432 needs_number_check() ? LocationSummary::kCall
433 : LocationSummary::kNoCall) {
362 ASSERT((kind() == Token::kEQ_STRICT) || 434 ASSERT((kind() == Token::kEQ_STRICT) ||
363 (kind() == Token::kNE_STRICT)); 435 (kind() == Token::kNE_STRICT));
364 436
365 Label is_true, is_false; 437 Label is_true, is_false;
366 BranchLabels labels = { &is_true, &is_false, &is_false }; 438 BranchLabels labels = { &is_true, &is_false, &is_false };
367 Condition true_condition = EmitComparisonCode(compiler, labels); 439 Condition true_condition = EmitComparisonCode(compiler, labels);
368 EmitBranchOnCondition(compiler, true_condition, labels); 440 EmitBranchOnCondition(compiler, true_condition, labels);
369 Label done; 441 Label done;
370 __ Bind(&is_false); 442 if (compiler->is_optimizing()) {
371 __ PushConstant(Bool::False()); 443 const Register result = locs()->out(0).reg();
372 __ Jump(&done); 444 __ Bind(&is_false);
373 __ Bind(&is_true); 445 __ LoadConstant(result, Bool::False());
374 __ PushConstant(Bool::True()); 446 __ Jump(&done);
375 __ Bind(&done); 447 __ Bind(&is_true);
448 __ LoadConstant(result, Bool::True());
449 __ Bind(&done);
450 } else {
451 __ Bind(&is_false);
452 __ PushConstant(Bool::False());
453 __ Jump(&done);
454 __ Bind(&is_true);
455 __ PushConstant(Bool::True());
456 __ Bind(&done);
457 }
376 } 458 }
377 459
378 460
379 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, 461 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone,
380 bool opt) const { 462 bool opt) const {
381 comparison()->InitializeLocationSummary(zone, opt); 463 comparison()->InitializeLocationSummary(zone, opt);
464 if (!comparison()->HasLocs()) return NULL;
zra 2016/05/19 16:24:27 Please use {}
Vyacheslav Egorov (Google) 2016/05/20 12:11:47 Done.
382 // Branches don't produce a result. 465 // Branches don't produce a result.
383 comparison()->locs()->set_out(0, Location::NoLocation()); 466 comparison()->locs()->set_out(0, Location::NoLocation());
384 return comparison()->locs(); 467 return comparison()->locs();
385 } 468 }
386 469
387 470
388 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 471 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
389 comparison()->EmitBranchCode(compiler, this); 472 comparison()->EmitBranchCode(compiler, this);
390 } 473 }
391 474
392 475
393 EMIT_NATIVE_CODE(Goto, 0, false) { 476 EMIT_NATIVE_CODE(Goto, 0) {
394 if (HasParallelMove()) { 477 if (HasParallelMove()) {
395 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); 478 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
396 } 479 }
397 // We can fall through if the successor is the next block in the list. 480 // We can fall through if the successor is the next block in the list.
398 // Otherwise, we need a jump. 481 // Otherwise, we need a jump.
399 if (!compiler->CanFallThroughTo(successor())) { 482 if (!compiler->CanFallThroughTo(successor())) {
400 __ Jump(compiler->GetJumpLabel(successor())); 483 __ Jump(compiler->GetJumpLabel(successor()));
401 } 484 }
402 } 485 }
403 486
404 487
405 EMIT_NATIVE_CODE(CreateArray, 2, true) { 488 EMIT_NATIVE_CODE(CreateArray,
489 2, Location::RequiresRegister(),
490 LocationSummary::kCall) {
491 if (compiler->is_optimizing()) {
492 __ Push(locs()->in(0).reg());
493 __ Push(locs()->in(1).reg());
494 }
406 __ CreateArrayTOS(); 495 __ CreateArrayTOS();
496 compiler->RecordSafepoint(locs());
497 if (compiler->is_optimizing()) {
498 __ PopLocal(locs()->out(0).reg());
499 }
407 } 500 }
408 501
409 502
410 EMIT_NATIVE_CODE(StoreIndexed, 3, false) { 503 EMIT_NATIVE_CODE(StoreIndexed, 3) {
411 ASSERT(class_id() == kArrayCid); 504 if (compiler->is_optimizing()) {
412 __ StoreIndexedTOS(); 505 if (class_id() != kArrayCid) {
506 compiler->Bailout(ToCString());
507 }
508
509 __ StoreIndexed(locs()->in(kArrayPos).reg(),
510 locs()->in(kIndexPos).reg(),
511 locs()->in(kValuePos).reg());
512 } else {
513 ASSERT(class_id() == kArrayCid);
514 __ StoreIndexedTOS();
515 }
413 } 516 }
414 517
415 518
416 EMIT_NATIVE_CODE(StringInterpolate, 0, false) { 519 EMIT_NATIVE_CODE(StringInterpolate,
520 1, Location::RegisterLocation(0),
521 LocationSummary::kCall) {
522 if (compiler->is_optimizing()) {
523 __ Push(locs()->in(0).reg());
524 }
417 const intptr_t kArgumentCount = 1; 525 const intptr_t kArgumentCount = 1;
418 const Array& arguments_descriptor = Array::Handle( 526 const Array& arguments_descriptor = Array::Handle(
419 ArgumentsDescriptor::New(kArgumentCount, Object::null_array())); 527 ArgumentsDescriptor::New(kArgumentCount, Object::null_array()));
420 __ PushConstant(CallFunction()); 528 __ PushConstant(CallFunction());
421 const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor); 529 const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
422 __ StaticCall(kArgumentCount, argdesc_kidx); 530 __ StaticCall(kArgumentCount, argdesc_kidx);
531 compiler->RecordAfterCall(this);
532
533 if (compiler->is_optimizing()) {
534 __ PopLocal(locs()->out(0).reg());
535 }
423 } 536 }
424 537
425 538
426 EMIT_NATIVE_CODE(NativeCall, 0, false) { 539 EMIT_NATIVE_CODE(NativeCall,
540 0, Location::NoLocation(),
541 LocationSummary::kCall) {
427 SetupNative(); 542 SetupNative();
428 543
429 const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function()); 544 const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());
430 545
431 ASSERT(!link_lazily()); 546 ASSERT(!link_lazily());
432 const ExternalLabel label(reinterpret_cast<uword>(native_c_function())); 547 const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
433 const intptr_t target_kidx = 548 const intptr_t target_kidx =
434 __ object_pool_wrapper().FindImmediate(label.address()); 549 __ object_pool_wrapper().FindImmediate(label.address());
435 const intptr_t argc_tag_kidx = 550 const intptr_t argc_tag_kidx =
436 __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag)); 551 __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
437 __ PushConstant(target_kidx); 552 __ PushConstant(target_kidx);
438 __ PushConstant(argc_tag_kidx); 553 __ PushConstant(argc_tag_kidx);
439 if (is_bootstrap_native()) { 554 if (is_bootstrap_native()) {
440 __ NativeBootstrapCall(); 555 __ NativeBootstrapCall();
441 } else { 556 } else {
442 __ NativeCall(); 557 __ NativeCall();
443 } 558 }
559 compiler->RecordSafepoint(locs());
444 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 560 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
445 Thread::kNoDeoptId, 561 Thread::kNoDeoptId,
446 token_pos()); 562 token_pos());
447 } 563 }
448 564
449 565
450 EMIT_NATIVE_CODE(AllocateObject, 0, true) { 566 EMIT_NATIVE_CODE(AllocateObject,
567 0, Location::RequiresRegister(),
568 LocationSummary::kCall) {
451 if (ArgumentCount() == 1) { 569 if (ArgumentCount() == 1) {
452 __ PushConstant(cls()); 570 __ PushConstant(cls());
453 __ AllocateT(); 571 __ AllocateT();
454 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 572 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
455 Thread::kNoDeoptId, 573 Thread::kNoDeoptId,
456 token_pos()); 574 token_pos());
457 } else { 575 } else {
458 const intptr_t kidx = __ AddConstant(cls()); 576 const intptr_t kidx = __ AddConstant(cls());
459 __ Allocate(kidx); 577 __ Allocate(kidx);
460 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 578 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
461 Thread::kNoDeoptId, 579 Thread::kNoDeoptId,
462 token_pos()); 580 token_pos());
463 } 581 }
582 compiler->RecordSafepoint(locs());
583 if (compiler->is_optimizing()) {
584 __ PopLocal(locs()->out(0).reg());
585 }
464 } 586 }
465 587
466 588
467 EMIT_NATIVE_CODE(StoreInstanceField, 2, false) { 589 EMIT_NATIVE_CODE(StoreInstanceField, 2) {
468 ASSERT(!HasTemp()); 590 ASSERT(!HasTemp());
469 ASSERT(offset_in_bytes() % kWordSize == 0); 591 ASSERT(offset_in_bytes() % kWordSize == 0);
470 if (compiler->is_optimizing()) { 592 if (compiler->is_optimizing()) {
471 const Register value = locs()->in(1).reg(); 593 const Register value = locs()->in(1).reg();
472 const Register instance = locs()->in(0).reg(); 594 const Register instance = locs()->in(0).reg();
473 __ StoreField(instance, offset_in_bytes() / kWordSize, value); 595 __ StoreField(instance, offset_in_bytes() / kWordSize, value);
474 } else { 596 } else {
475 __ StoreFieldTOS(offset_in_bytes() / kWordSize); 597 __ StoreFieldTOS(offset_in_bytes() / kWordSize);
476 } 598 }
477 } 599 }
478 600
479 601
480 EMIT_NATIVE_CODE(LoadField, 1, true) { 602 EMIT_NATIVE_CODE(LoadField, 1, Location::RequiresRegister()) {
481 ASSERT(offset_in_bytes() % kWordSize == 0); 603 ASSERT(offset_in_bytes() % kWordSize == 0);
482 __ LoadFieldTOS(offset_in_bytes() / kWordSize); 604 if (compiler->is_optimizing()) {
605 const Register result = locs()->out(0).reg();
606 const Register instance = locs()->in(0).reg();
607 __ LoadField(result, instance, offset_in_bytes() / kWordSize);
608 } else {
609 __ LoadFieldTOS(offset_in_bytes() / kWordSize);
610 }
483 } 611 }
484 612
485 613
486 EMIT_NATIVE_CODE(BooleanNegate, 1, true) { 614 EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) {
487 __ BooleanNegateTOS(); 615 if (compiler->is_optimizing()) {
616 __ BooleanNegate(locs()->out(0).reg(), locs()->in(0).reg());
617 } else {
618 __ BooleanNegateTOS();
619 }
488 } 620 }
489 621
490 622
491 EMIT_NATIVE_CODE(AllocateContext, 0, false) { 623 EMIT_NATIVE_CODE(AllocateContext,
624 0, Location::RequiresRegister(),
625 LocationSummary::kCall) {
626 ASSERT(!compiler->is_optimizing());
492 __ AllocateContext(num_context_variables()); 627 __ AllocateContext(num_context_variables());
628 compiler->RecordSafepoint(locs());
493 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 629 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
494 Thread::kNoDeoptId, 630 Thread::kNoDeoptId,
495 token_pos()); 631 token_pos());
496 } 632 }
497 633
498 634
499 EMIT_NATIVE_CODE(CloneContext, 0, false) { 635 EMIT_NATIVE_CODE(CloneContext,
636 1, Location::RequiresRegister(),
637 LocationSummary::kCall) {
638 ASSERT(!compiler->is_optimizing());
500 __ CloneContext(); 639 __ CloneContext();
640 compiler->RecordSafepoint(locs());
501 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 641 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
502 Thread::kNoDeoptId, 642 Thread::kNoDeoptId,
503 token_pos()); 643 token_pos());
504 } 644 }
505 645
506 646
507 EMIT_NATIVE_CODE(CatchBlockEntry, 0, false) { 647 EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
508 __ Bind(compiler->GetJumpLabel(this)); 648 __ Bind(compiler->GetJumpLabel(this));
509 compiler->AddExceptionHandler(catch_try_index(), 649 compiler->AddExceptionHandler(catch_try_index(),
510 try_index(), 650 try_index(),
511 compiler->assembler()->CodeSize(), 651 compiler->assembler()->CodeSize(),
512 catch_handler_types_, 652 catch_handler_types_,
513 needs_stacktrace()); 653 needs_stacktrace());
514 __ MoveSpecial(-exception_var().index()-1, 654 __ MoveSpecial(-exception_var().index()-1,
515 Simulator::kExceptionSpecialIndex); 655 Simulator::kExceptionSpecialIndex);
516 __ MoveSpecial(-stacktrace_var().index()-1, 656 __ MoveSpecial(-stacktrace_var().index()-1,
517 Simulator::kStacktraceSpecialIndex); 657 Simulator::kStacktraceSpecialIndex);
518 __ SetFrame(compiler->StackSize()); 658 __ SetFrame(compiler->StackSize());
519 } 659 }
520 660
521 661
522 EMIT_NATIVE_CODE(Throw, 0, false) { 662 EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
523 __ Throw(0); 663 __ Throw(0);
664 compiler->RecordSafepoint(locs());
524 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 665 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
525 deopt_id(), 666 deopt_id(),
526 token_pos()); 667 token_pos());
527 __ Trap(); 668 __ Trap();
528 } 669 }
529 670
530 671
531 EMIT_NATIVE_CODE(ReThrow, 0, false) { 672 EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) {
532 compiler->SetNeedsStacktrace(catch_try_index()); 673 compiler->SetNeedsStacktrace(catch_try_index());
533 __ Throw(1); 674 __ Throw(1);
675 compiler->RecordSafepoint(locs());
534 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 676 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
535 deopt_id(), 677 deopt_id(),
536 token_pos()); 678 token_pos());
537 __ Trap(); 679 __ Trap();
538 } 680 }
539 681
540 EMIT_NATIVE_CODE(InstantiateType, 1, true) { 682 EMIT_NATIVE_CODE(InstantiateType,
683 1, Location::RequiresRegister(),
684 LocationSummary::kCall) {
685 if (compiler->is_optimizing()) {
686 __ Push(locs()->in(0).reg());
687 }
541 __ InstantiateType(__ AddConstant(type())); 688 __ InstantiateType(__ AddConstant(type()));
689 compiler->RecordSafepoint(locs());
542 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 690 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
543 deopt_id(), 691 deopt_id(),
544 token_pos()); 692 token_pos());
693 if (compiler->is_optimizing()) {
694 __ PopLocal(locs()->out(0).reg());
695 }
545 } 696 }
546 697
547 EMIT_NATIVE_CODE(InstantiateTypeArguments, 1, true) { 698 EMIT_NATIVE_CODE(InstantiateTypeArguments,
699 1, Location::RequiresRegister(),
700 LocationSummary::kCall) {
701 if (compiler->is_optimizing()) {
702 __ Push(locs()->in(0).reg());
703 }
548 __ InstantiateTypeArgumentsTOS( 704 __ InstantiateTypeArgumentsTOS(
549 type_arguments().IsRawInstantiatedRaw(type_arguments().Length()), 705 type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
550 __ AddConstant(type_arguments())); 706 __ AddConstant(type_arguments()));
707 compiler->RecordSafepoint(locs());
551 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 708 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
552 deopt_id(), 709 deopt_id(),
553 token_pos()); 710 token_pos());
711 if (compiler->is_optimizing()) {
712 __ PopLocal(locs()->out(0).reg());
713 }
554 } 714 }
555 715
556 716
557 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 717 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
558 __ DebugStep(); 718 __ DebugStep();
559 compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos()); 719 compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos());
560 } 720 }
561 721
562 722
563 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 723 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
564 if (!compiler->CanFallThroughTo(normal_entry())) { 724 if (!compiler->CanFallThroughTo(normal_entry())) {
565 __ Jump(compiler->GetJumpLabel(normal_entry())); 725 __ Jump(compiler->GetJumpLabel(normal_entry()));
566 } 726 }
567 } 727 }
568 728
569 729
570 LocationSummary* Instruction::MakeCallSummary(Zone* zone) { 730 LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
571 LocationSummary* result = new(zone) LocationSummary( 731 LocationSummary* result = new(zone) LocationSummary(
572 zone, 0, 0, LocationSummary::kCall); 732 zone, 0, 0, LocationSummary::kCall);
573 result->set_out(0, Location::RequiresRegister()); 733 // TODO(vegorov) support allocating out registers for calls.
734 // Currently we require them to be fixed.
735 result->set_out(0, Location::RegisterLocation(0));
574 return result; 736 return result;
575 } 737 }
576 738
577 739
578 CompileType BinaryUint32OpInstr::ComputeType() const { 740 CompileType BinaryUint32OpInstr::ComputeType() const {
579 return CompileType::Int(); 741 return CompileType::Int();
580 } 742 }
581 743
582 744
583 CompileType ShiftUint32OpInstr::ComputeType() const { 745 CompileType ShiftUint32OpInstr::ComputeType() const {
(...skipping 33 matching lines...)
617 779
618 case kTypedDataInt8ArrayCid: 780 case kTypedDataInt8ArrayCid:
619 case kTypedDataUint8ArrayCid: 781 case kTypedDataUint8ArrayCid:
620 case kTypedDataUint8ClampedArrayCid: 782 case kTypedDataUint8ClampedArrayCid:
621 case kExternalTypedDataUint8ArrayCid: 783 case kExternalTypedDataUint8ArrayCid:
622 case kExternalTypedDataUint8ClampedArrayCid: 784 case kExternalTypedDataUint8ClampedArrayCid:
623 case kTypedDataInt16ArrayCid: 785 case kTypedDataInt16ArrayCid:
624 case kTypedDataUint16ArrayCid: 786 case kTypedDataUint16ArrayCid:
625 case kOneByteStringCid: 787 case kOneByteStringCid:
626 case kTwoByteStringCid: 788 case kTwoByteStringCid:
789 case kExternalOneByteStringCid:
790 case kExternalTwoByteStringCid:
627 return CompileType::FromCid(kSmiCid); 791 return CompileType::FromCid(kSmiCid);
628 792
629 case kTypedDataInt32ArrayCid: 793 case kTypedDataInt32ArrayCid:
630 case kTypedDataUint32ArrayCid: 794 case kTypedDataUint32ArrayCid:
631 return CompileType::Int(); 795 return CompileType::Int();
632 796
633 default: 797 default:
634 UNREACHABLE(); 798 UNREACHABLE();
635 return CompileType::Dynamic(); 799 return CompileType::Dynamic();
636 } 800 }
637 } 801 }
638 802
639 803
640 Representation LoadIndexedInstr::representation() const { 804 Representation LoadIndexedInstr::representation() const {
641 switch (class_id_) { 805 switch (class_id_) {
642 case kArrayCid: 806 case kArrayCid:
643 case kImmutableArrayCid: 807 case kImmutableArrayCid:
644 case kTypedDataInt8ArrayCid: 808 case kTypedDataInt8ArrayCid:
645 case kTypedDataUint8ArrayCid: 809 case kTypedDataUint8ArrayCid:
646 case kTypedDataUint8ClampedArrayCid: 810 case kTypedDataUint8ClampedArrayCid:
647 case kExternalTypedDataUint8ArrayCid: 811 case kExternalTypedDataUint8ArrayCid:
648 case kExternalTypedDataUint8ClampedArrayCid: 812 case kExternalTypedDataUint8ClampedArrayCid:
649 case kTypedDataInt16ArrayCid: 813 case kTypedDataInt16ArrayCid:
650 case kTypedDataUint16ArrayCid: 814 case kTypedDataUint16ArrayCid:
651 case kOneByteStringCid: 815 case kOneByteStringCid:
652 case kTwoByteStringCid: 816 case kTwoByteStringCid:
817 case kExternalOneByteStringCid:
818 case kExternalTwoByteStringCid:
653 return kTagged; 819 return kTagged;
654 case kTypedDataInt32ArrayCid: 820 case kTypedDataInt32ArrayCid:
655 return kUnboxedInt32; 821 return kUnboxedInt32;
656 case kTypedDataUint32ArrayCid: 822 case kTypedDataUint32ArrayCid:
657 return kUnboxedUint32; 823 return kUnboxedUint32;
658 case kTypedDataFloat32ArrayCid: 824 case kTypedDataFloat32ArrayCid:
659 case kTypedDataFloat64ArrayCid: 825 case kTypedDataFloat64ArrayCid:
660 return kUnboxedDouble; 826 return kUnboxedDouble;
661 case kTypedDataInt32x4ArrayCid: 827 case kTypedDataInt32x4ArrayCid:
662 return kUnboxedInt32x4; 828 return kUnboxedInt32x4;
(...skipping 10 matching lines...)
673 839
674 Representation StoreIndexedInstr::RequiredInputRepresentation( 840 Representation StoreIndexedInstr::RequiredInputRepresentation(
675 intptr_t idx) const { 841 intptr_t idx) const {
676 // Array can be a Dart object or a pointer to external data. 842 // Array can be a Dart object or a pointer to external data.
677 if (idx == 0) return kNoRepresentation; // Flexible input representation. 843 if (idx == 0) return kNoRepresentation; // Flexible input representation.
678 if (idx == 1) return kTagged; // Index is a smi. 844 if (idx == 1) return kTagged; // Index is a smi.
679 ASSERT(idx == 2); 845 ASSERT(idx == 2);
680 switch (class_id_) { 846 switch (class_id_) {
681 case kArrayCid: 847 case kArrayCid:
682 case kOneByteStringCid: 848 case kOneByteStringCid:
849 case kTwoByteStringCid:
850 case kExternalOneByteStringCid:
851 case kExternalTwoByteStringCid:
683 case kTypedDataInt8ArrayCid: 852 case kTypedDataInt8ArrayCid:
684 case kTypedDataUint8ArrayCid: 853 case kTypedDataUint8ArrayCid:
685 case kExternalTypedDataUint8ArrayCid: 854 case kExternalTypedDataUint8ArrayCid:
686 case kTypedDataUint8ClampedArrayCid: 855 case kTypedDataUint8ClampedArrayCid:
687 case kExternalTypedDataUint8ClampedArrayCid: 856 case kExternalTypedDataUint8ClampedArrayCid:
688 case kTypedDataInt16ArrayCid: 857 case kTypedDataInt16ArrayCid:
689 case kTypedDataUint16ArrayCid: 858 case kTypedDataUint16ArrayCid:
690 return kTagged; 859 return kTagged;
691 case kTypedDataInt32ArrayCid: 860 case kTypedDataInt32ArrayCid:
692 return kUnboxedInt32; 861 return kUnboxedInt32;
(...skipping 10 matching lines...)
703 return kUnboxedFloat64x2; 872 return kUnboxedFloat64x2;
704 default: 873 default:
705 UNREACHABLE(); 874 UNREACHABLE();
706 return kTagged; 875 return kTagged;
707 } 876 }
708 } 877 }
709 878
710 } // namespace dart 879 } // namespace dart
711 880
712 #endif // defined TARGET_ARCH_DBC 881 #endif // defined TARGET_ARCH_DBC