OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/flow_graph_optimizer.h" | 5 #include "vm/flow_graph_optimizer.h" |
6 | 6 |
7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
8 #include "vm/cha.h" | 8 #include "vm/cha.h" |
9 #include "vm/cpu.h" | 9 #include "vm/cpu.h" |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 249 matching lines...) |
260 | 260 |
261 static bool IsPositiveOrZeroSmiConst(Definition* d) { | 261 static bool IsPositiveOrZeroSmiConst(Definition* d) { |
262 ConstantInstr* const_instr = d->AsConstant(); | 262 ConstantInstr* const_instr = d->AsConstant(); |
263 if ((const_instr != NULL) && (const_instr->value().IsSmi())) { | 263 if ((const_instr != NULL) && (const_instr->value().IsSmi())) { |
264 return Smi::Cast(const_instr->value()).Value() >= 0; | 264 return Smi::Cast(const_instr->value()).Value() >= 0; |
265 } | 265 } |
266 return false; | 266 return false; |
267 } | 267 } |
268 | 268 |
269 | 269 |
| 270 Definition* FlowGraphOptimizer::OptimizeMint32BitMasks( |
| 271 BinaryMintOpInstr* mask) { |
| 272 ASSERT(mask != NULL); |
| 273 ASSERT(mask->op_kind() == Token::kBIT_AND); |
| 274 Range* range = mask->range(); |
| 275 if ((range == NULL) || !range->Is32BitMask()) { |
| 276 // No range or range is not exactly [0, 0xFFFFFFFF]. |
| 277 return NULL; |
| 278 } |
| 279 // Find the mask target by looking for the input that isn't the constant |
| 280 // 0xFFFFFFFF. |
| 281 Value* target = NULL; |
| 282 if (!mask->InputAt(0)->BindsTo32BitMaskConstant()) { |
| 283 target = mask->InputAt(0); |
| 284 } else if (!mask->InputAt(1)->BindsTo32BitMaskConstant()) { |
| 285 target = mask->InputAt(1); |
| 286 } else { |
| 287 return NULL; |
| 288 } |
| 289 // If the target has multiple uses, we can't assume its range can be safely |
| 290 // constrained by this mask. |
| 291 if (!target->IsSingleUse()) { |
| 292 return NULL; |
| 293 } |
| 294 |
| 295 Definition* def = target->definition(); |
| 296 if (!def->IsMintDefinition()) { |
| 297 // TODO(johnmccutchan): Should this be an ASSERT? |
| 298 return NULL; |
| 299 } |
| 300 def->range()->Make32BitMask(); |
| 301 |
| 302 return def; |
| 303 } |
| 304 |
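The narrowing above rests on a standard identity: the low 32 bits of a 64-bit add, subtract, multiply, or bitwise and/or/xor depend only on the low 32 bits of the operands, so masking the result with 0xFFFFFFFF is equivalent to performing the operation with 32-bit wrap-around. A minimal standalone sketch of that identity (illustration only, not part of this CL):

#include <cassert>
#include <cstdint>

int main() {
  // Masking the 64-bit product with 0xFFFFFFFF keeps exactly the bits that
  // 32-bit wrap-around arithmetic would produce.
  const uint64_t a = 0xFFFFFFF0u;
  const uint64_t b = 0x12345678u;
  const uint64_t masked = (a * b) & 0xFFFFFFFFu;
  const uint32_t wrapped =
      static_cast<uint32_t>(a) * static_cast<uint32_t>(b);
  assert(masked == wrapped);
  return 0;
}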
| 305 |
| 306 bool FlowGraphOptimizer::TryMarkMint32Bit(Definition* mintop) { |
| 307 ASSERT(mintop != NULL); |
| 308 ASSERT(mintop->IsMintDefinition()); |
| 309 |
| 310 Range* range = mintop->range(); |
| 311 if (range == NULL) { |
| 312 return false; |
| 313 } |
| 314 bool is_32_bit = false; |
| 315 if (range->Is32BitMask()) { |
| 316 is_32_bit = true; |
| 317 } else if (range->IsWithin(0, kMaxUint32)) { |
| 318 is_32_bit = true; |
| 319 } |
| 320 |
| 321 if (!is_32_bit) { |
| 322 return false; |
| 323 } |
| 324 |
| 325 if (mintop->IsBinaryMintOp()) { |
| 326 BinaryMintOpInstr* instr = mintop->AsBinaryMintOp(); |
| 327 instr->set_is_32_bit(true); |
| 328 } else if (mintop->IsShiftMintOp()) { |
| 329 ShiftMintOpInstr* instr = mintop->AsShiftMintOp(); |
| 330 instr->set_is_32_bit(true); |
| 331 } else if (mintop->IsUnboxInteger()) { |
| 332 UnboxIntegerInstr* instr = mintop->AsUnboxInteger(); |
| 333 instr->set_is_32_bit(true); |
| 334 } else if (mintop->IsUnaryMintOp()) { |
| 335 UnaryMintOpInstr* instr = mintop->AsUnaryMintOp(); |
| 336 instr->set_is_32_bit(true); |
| 337 } else { |
| 338 UNREACHABLE(); |
| 339 } |
| 340 |
| 341 return true; |
| 342 } |
| 343 |
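TryMarkMint32Bit accepts a definition either when its range is exactly the 32-bit mask range or when range analysis bounds it within [0, kMaxUint32]; in both cases the value is losslessly representable as an unsigned 32-bit integer. A hypothetical helper expressing the same bound check (illustration only, not part of this CL):

#include <cstdint>

// A closed range [min, max] fits in an unsigned 32-bit integer exactly
// when it is non-negative and bounded above by 0xFFFFFFFF (the value of
// the VM's kMaxUint32 constant).
bool RangeFitsInUint32(int64_t min, int64_t max) {
  const int64_t kMaxUint32Value = 0xFFFFFFFFLL;
  return (0 <= min) && (min <= max) && (max <= kMaxUint32Value);
}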
| 344 |
270 void FlowGraphOptimizer::OptimizeLeftShiftBitAndSmiOp( | 345 void FlowGraphOptimizer::OptimizeLeftShiftBitAndSmiOp( |
271 Definition* bit_and_instr, | 346 Definition* bit_and_instr, |
272 Definition* left_instr, | 347 Definition* left_instr, |
273 Definition* right_instr) { | 348 Definition* right_instr) { |
274 ASSERT(bit_and_instr != NULL); | 349 ASSERT(bit_and_instr != NULL); |
275 ASSERT((left_instr != NULL) && (right_instr != NULL)); | 350 ASSERT((left_instr != NULL) && (right_instr != NULL)); |
276 | 351 |
277 // Check for pattern, smi_shift_left must be single-use. | 352 // Check for pattern, smi_shift_left must be single-use. |
278 bool is_positive_or_zero = IsPositiveOrZeroSmiConst(left_instr); | 353 bool is_positive_or_zero = IsPositiveOrZeroSmiConst(left_instr); |
279 if (!is_positive_or_zero) { | 354 if (!is_positive_or_zero) { |
280 is_positive_or_zero = IsPositiveOrZeroSmiConst(right_instr); | 355 is_positive_or_zero = IsPositiveOrZeroSmiConst(right_instr); |
281 } | 356 } |
282 if (!is_positive_or_zero) return; | 357 if (!is_positive_or_zero) { |
| 358 return; |
| 359 } |
283 | 360 |
284 BinarySmiOpInstr* smi_shift_left = NULL; | 361 BinarySmiOpInstr* smi_shift_left = NULL; |
285 if (bit_and_instr->InputAt(0)->IsSingleUse()) { | 362 if (bit_and_instr->InputAt(0)->IsSingleUse()) { |
286 smi_shift_left = AsSmiShiftLeftInstruction(left_instr); | 363 smi_shift_left = AsSmiShiftLeftInstruction(left_instr); |
287 } | 364 } |
288 if ((smi_shift_left == NULL) && (bit_and_instr->InputAt(1)->IsSingleUse())) { | 365 if ((smi_shift_left == NULL) && (bit_and_instr->InputAt(1)->IsSingleUse())) { |
289 smi_shift_left = AsSmiShiftLeftInstruction(right_instr); | 366 smi_shift_left = AsSmiShiftLeftInstruction(right_instr); |
290 } | 367 } |
291 if (smi_shift_left == NULL) return; | 368 if (smi_shift_left == NULL) { |
| 369 return; |
| 370 } |
292 | 371 |
293 // Pattern recognized. | 372 // Pattern recognized. |
294 smi_shift_left->set_is_truncating(true); | 373 smi_shift_left->set_is_truncating(true); |
295 ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryMintOp()); | 374 ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryMintOp()); |
296 if (bit_and_instr->IsBinaryMintOp()) { | 375 if (bit_and_instr->IsBinaryMintOp()) { |
297 // Replace Mint op with Smi op. | 376 // Replace Mint op with Smi op. |
298 BinarySmiOpInstr* smi_op = new(I) BinarySmiOpInstr( | 377 BinarySmiOpInstr* smi_op = new(I) BinarySmiOpInstr( |
299 Token::kBIT_AND, | 378 Token::kBIT_AND, |
300 new(I) Value(left_instr), | 379 new(I) Value(left_instr), |
301 new(I) Value(right_instr), | 380 new(I) Value(right_instr), |
(...skipping 216 matching lines...) |
518 } | 597 } |
519 } | 598 } |
520 } | 599 } |
521 } | 600 } |
522 TryMergeTruncDivMod(&div_mod_merge); | 601 TryMergeTruncDivMod(&div_mod_merge); |
523 TryMergeMathUnary(&sin_cos_merge); | 602 TryMergeMathUnary(&sin_cos_merge); |
524 current_iterator_ = NULL; | 603 current_iterator_ = NULL; |
525 } | 604 } |
526 } | 605 } |
527 | 606 |
528 | |
529 static void EnsureSSATempIndex(FlowGraph* graph, | 607 static void EnsureSSATempIndex(FlowGraph* graph, |
530 Definition* defn, | 608 Definition* defn, |
531 Definition* replacement) { | 609 Definition* replacement) { |
532 if ((replacement->ssa_temp_index() == -1) && | 610 if ((replacement->ssa_temp_index() == -1) && |
533 (defn->ssa_temp_index() != -1)) { | 611 (defn->ssa_temp_index() != -1)) { |
534 replacement->set_ssa_temp_index(graph->alloc_ssa_temp_index()); | 612 replacement->set_ssa_temp_index(graph->alloc_ssa_temp_index()); |
535 } | 613 } |
536 } | 614 } |
537 | 615 |
538 | 616 |
(...skipping 18 matching lines...) |
557 OS::Print("Removing %s\n", current->DebugName()); | 635 OS::Print("Removing %s\n", current->DebugName()); |
558 } else { | 636 } else { |
559 ASSERT(!current_defn->HasUses()); | 637 ASSERT(!current_defn->HasUses()); |
560 OS::Print("Removing v%" Pd ".\n", current_defn->ssa_temp_index()); | 638 OS::Print("Removing v%" Pd ".\n", current_defn->ssa_temp_index()); |
561 } | 639 } |
562 } | 640 } |
563 iterator->RemoveCurrentFromGraph(); | 641 iterator->RemoveCurrentFromGraph(); |
564 } | 642 } |
565 | 643 |
566 | 644 |
| 645 void FlowGraphOptimizer::TryRangeDerivedOptimizations() { |
| 646 ASSERT(current_iterator_ == NULL); |
| 647 |
| 648 for (intptr_t i = 0; i < block_order_.length(); ++i) { |
| 649 BlockEntryInstr* entry = block_order_[i]; |
| 650 // First set range information on inputs to a mint mask. The following two |
| 651 // patterns are supported: |
| 652 // v7 & 0xFFFFFFFF; |
| 653 // 0xFFFFFFFF & v7; |
| 654 // Afterwards, if v7 has only a single use, v7 is marked as having |
| 655 // a 32-bit range and the mask operation is removed. |
| 656 { |
| 657 ForwardInstructionIterator it(entry); |
| 658 current_iterator_ = &it; |
| 659 for (; !it.Done(); it.Advance()) { |
| 660 // Constrain ranges of mint operations whose only use is a mint mask op. |
| 661 if (it.Current()->IsBinaryMintOp()) { |
| 662 BinaryMintOpInstr* mintop = it.Current()->AsBinaryMintOp(); |
| 663 if (mintop->op_kind() == Token::kBIT_AND) { |
| 664 Definition* masked = OptimizeMint32BitMasks(mintop); |
| 665 if (masked != NULL) { |
| 666 // Replace mask instruction with masked input. |
| 667 ReplaceCurrentInstruction(current_iterator(), |
| 668 mintop, |
| 669 masked, |
| 670 flow_graph()); |
| 671 } |
| 672 } |
| 673 } |
| 674 } |
| 675 current_iterator_ = NULL; |
| 676 } |
| 677 // Mark mint instructions as 32-bit when their range information shows |
| 678 // they are non-negative and fit in 32 bits. |
| 679 { |
| 680 ForwardInstructionIterator it(entry); |
| 681 current_iterator_ = &it; |
| 682 for (; !it.Done(); it.Advance()) { |
| 683 if (!it.Current()->IsDefinition()) { |
| 684 continue; |
| 685 } |
| 686 Definition* def = it.Current()->AsDefinition(); |
| 687 if (def->IsMintDefinition()) { |
| 688 TryMarkMint32Bit(def); |
| 689 } |
| 690 } |
| 691 current_iterator_ = NULL; |
| 692 } |
| 693 } |
| 694 } |
| 695 |
| 696 |
567 bool FlowGraphOptimizer::Canonicalize() { | 697 bool FlowGraphOptimizer::Canonicalize() { |
568 bool changed = false; | 698 bool changed = false; |
569 for (intptr_t i = 0; i < block_order_.length(); ++i) { | 699 for (intptr_t i = 0; i < block_order_.length(); ++i) { |
570 BlockEntryInstr* entry = block_order_[i]; | 700 BlockEntryInstr* entry = block_order_[i]; |
571 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { | 701 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) { |
572 Instruction* current = it.Current(); | 702 Instruction* current = it.Current(); |
573 Instruction* replacement = current->Canonicalize(flow_graph()); | 703 Instruction* replacement = current->Canonicalize(flow_graph()); |
574 if (replacement != current) { | 704 if (replacement != current) { |
575 // For non-definitions Canonicalize should return either NULL or | 705 // For non-definitions Canonicalize should return either NULL or |
576 // this. | 706 // this. |
(...skipping 27 matching lines...) |
604 Definition* converted = NULL; | 734 Definition* converted = NULL; |
605 if ((from == kTagged) && (to == kUnboxedMint)) { | 735 if ((from == kTagged) && (to == kUnboxedMint)) { |
606 ASSERT((deopt_target != NULL) || | 736 ASSERT((deopt_target != NULL) || |
607 (use->Type()->ToCid() == kUnboxedMint)); | 737 (use->Type()->ToCid() == kUnboxedMint)); |
608 const intptr_t deopt_id = (deopt_target != NULL) ? | 738 const intptr_t deopt_id = (deopt_target != NULL) ? |
609 deopt_target->DeoptimizationTarget() : Isolate::kNoDeoptId; | 739 deopt_target->DeoptimizationTarget() : Isolate::kNoDeoptId; |
610 converted = new(I) UnboxIntegerInstr(use->CopyWithType(), deopt_id); | 740 converted = new(I) UnboxIntegerInstr(use->CopyWithType(), deopt_id); |
611 | 741 |
612 } else if ((from == kUnboxedMint) && (to == kTagged)) { | 742 } else if ((from == kUnboxedMint) && (to == kTagged)) { |
613 converted = new(I) BoxIntegerInstr(use->CopyWithType()); | 743 converted = new(I) BoxIntegerInstr(use->CopyWithType()); |
614 | 744 } else if ((from == kUnboxedMint) && (to == kUnboxedMint32)) { |
| 745 converted = new(I) MintConverterInstr(MintConverterInstr::kMintToMint32, |
| 746 use->CopyWithType()); |
| 747 } else if ((from == kUnboxedMint32) && (to == kUnboxedMint)) { |
| 748 converted = new(I) MintConverterInstr(MintConverterInstr::kMint32ToMint, |
| 749 use->CopyWithType()); |
615 } else if (from == kUnboxedMint && to == kUnboxedDouble) { | 750 } else if (from == kUnboxedMint && to == kUnboxedDouble) { |
616 ASSERT(CanUnboxDouble()); | 751 ASSERT(CanUnboxDouble()); |
617 // Convert by boxing/unboxing. | 752 // Convert by boxing/unboxing. |
618 // TODO(fschneider): Implement direct unboxed mint-to-double conversion. | 753 // TODO(fschneider): Implement direct unboxed mint-to-double conversion. |
619 BoxIntegerInstr* boxed = | 754 BoxIntegerInstr* boxed = |
620 new(I) BoxIntegerInstr(use->CopyWithType()); | 755 new(I) BoxIntegerInstr(use->CopyWithType()); |
621 use->BindTo(boxed); | 756 use->BindTo(boxed); |
622 InsertBefore(insert_before, boxed, NULL, FlowGraph::kValue); | 757 InsertBefore(insert_before, boxed, NULL, FlowGraph::kValue); |
623 | 758 |
624 const intptr_t deopt_id = (deopt_target != NULL) ? | 759 const intptr_t deopt_id = (deopt_target != NULL) ? |
(...skipping 7882 matching lines...) |
8507 // TODO(kmillikin): Handle unbox operation. | 8642 // TODO(kmillikin): Handle unbox operation. |
8508 SetValue(instr, non_constant_); | 8643 SetValue(instr, non_constant_); |
8509 } | 8644 } |
8510 | 8645 |
8511 | 8646 |
8512 void ConstantPropagator::VisitBinaryMintOp(BinaryMintOpInstr* instr) { | 8647 void ConstantPropagator::VisitBinaryMintOp(BinaryMintOpInstr* instr) { |
8513 HandleBinaryOp(instr, instr->op_kind(), *instr->left(), *instr->right()); | 8648 HandleBinaryOp(instr, instr->op_kind(), *instr->left(), *instr->right()); |
8514 } | 8649 } |
8515 | 8650 |
8516 | 8651 |
| 8652 void ConstantPropagator::VisitMintConverter(MintConverterInstr* instr) { |
| 8653 SetValue(instr, non_constant_); |
| 8654 } |
| 8655 |
| 8656 |
8517 void ConstantPropagator::VisitShiftMintOp(ShiftMintOpInstr* instr) { | 8657 void ConstantPropagator::VisitShiftMintOp(ShiftMintOpInstr* instr) { |
8518 HandleBinaryOp(instr, instr->op_kind(), *instr->left(), *instr->right()); | 8658 HandleBinaryOp(instr, instr->op_kind(), *instr->left(), *instr->right()); |
8519 } | 8659 } |
8520 | 8660 |
8521 | 8661 |
8522 void ConstantPropagator::VisitUnaryMintOp(UnaryMintOpInstr* instr) { | 8662 void ConstantPropagator::VisitUnaryMintOp(UnaryMintOpInstr* instr) { |
8523 // TODO(kmillikin): Handle unary operations. | 8663 // TODO(kmillikin): Handle unary operations. |
8524 SetValue(instr, non_constant_); | 8664 SetValue(instr, non_constant_); |
8525 } | 8665 } |
8526 | 8666 |
(...skipping 1303 matching lines...) |
9830 } | 9970 } |
9831 | 9971 |
9832 // Insert materializations at environment uses. | 9972 // Insert materializations at environment uses. |
9833 for (intptr_t i = 0; i < exits.length(); i++) { | 9973 for (intptr_t i = 0; i < exits.length(); i++) { |
9834 CreateMaterializationAt(exits[i], alloc, alloc->cls(), *slots); | 9974 CreateMaterializationAt(exits[i], alloc, alloc->cls(), *slots); |
9835 } | 9975 } |
9836 } | 9976 } |
9837 | 9977 |
9838 | 9978 |
9839 } // namespace dart | 9979 } // namespace dart |