| Index: runtime/vm/flow_graph_optimizer.cc
|
| diff --git a/runtime/vm/flow_graph_optimizer.cc b/runtime/vm/flow_graph_optimizer.cc
|
| index e012f598b5297a1b80fe2611b22f48a5c2d6cf6f..47a545b37989f395e3f156a57db1fa48dcf6ac30 100644
|
| --- a/runtime/vm/flow_graph_optimizer.cc
|
| +++ b/runtime/vm/flow_graph_optimizer.cc
|
| @@ -267,6 +267,81 @@ static bool IsPositiveOrZeroSmiConst(Definition* d) {
|
| }
|
|
|
|
|
| +Definition* FlowGraphOptimizer::OptimizeMint32BitMasks(
|
| + BinaryMintOpInstr* mask) {
|
| + ASSERT(mask != NULL);
|
| + ASSERT(mask->op_kind() == Token::kBIT_AND);
|
| + Range* range = mask->range();
|
| + if ((range == NULL) || !range->Is32BitMask()) {
|
| + // No range or range is not exactly [0, 0xFFFFFFFF].
|
| + return NULL;
|
| + }
|
| + // Find the mask target by looking for the input that isn't the constant
|
| + // 0xFFFFFFFF.
|
| + Value* target = NULL;
|
| + if (!mask->InputAt(0)->BindsTo32BitMaskConstant()) {
|
| + target = mask->InputAt(0);
|
| + } else if (!mask->InputAt(1)->BindsTo32BitMaskConstant()) {
|
| + target = mask->InputAt(1);
|
| + } else {
|
| + return NULL;
|
| + }
|
| +  // If the target has more than one use, other uses may observe values
|
| +  // outside [0, 0xFFFFFFFF], so this mask alone cannot constrain its range.
|
| + if (!target->IsSingleUse()) {
|
| + return NULL;
|
| + }
|
| +
|
| + Definition* def = target->definition();
|
| + if (!def->IsMintDefinition()) {
|
| + // TODO(johnmccutchan): Should this be an ASSERT?
|
| + return NULL;
|
| + }
|
| + def->range()->Make32BitMask();
|
| +
|
| + return def;
|
| +}
|
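A quick way to see why the [0, 0xFFFFFFFF] range is the right trigger here: masking any 64-bit value with 0xFFFFFFFF always lands in that interval, regardless of the sign or magnitude of the input. A minimal standalone check of that fact (plain C++, not VM code; reading Range::Is32BitMask() as describing exactly this interval is an assumption based on the comment above):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Whatever the sign or magnitude of x, x & 0xFFFFFFFF keeps only the
      // low 32 bits, so the result always lies within [0, 0xFFFFFFFF].
      const int64_t samples[] = {0, -1, INT64_MIN, INT64_MAX,
                                 INT64_C(0x1FFFFFFFF)};
      for (int64_t x : samples) {
        const int64_t masked = x & INT64_C(0xFFFFFFFF);
        assert((masked >= 0) && (masked <= INT64_C(0xFFFFFFFF)));
      }
      return 0;
    }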
| +
|
| +
|
| +bool FlowGraphOptimizer::TryMarkMint32Bit(Definition* mintop) {
|
| + ASSERT(mintop != NULL);
|
| + ASSERT(mintop->IsMintDefinition());
|
| +
|
| + Range* range = mintop->range();
|
| + if (range == NULL) {
|
| + return false;
|
| + }
|
| + bool is_32_bit = false;
|
| + if (range->Is32BitMask()) {
|
| + is_32_bit = true;
|
| + } else if (range->IsWithin(0, kMaxUint32)) {
|
| + is_32_bit = true;
|
| + }
|
| +
|
| + if (!is_32_bit) {
|
| + return false;
|
| + }
|
| +
|
| + if (mintop->IsBinaryMintOp()) {
|
| + BinaryMintOpInstr* instr = mintop->AsBinaryMintOp();
|
| + instr->set_is_32_bit(true);
|
| + } else if (mintop->IsShiftMintOp()) {
|
| + ShiftMintOpInstr* instr = mintop->AsShiftMintOp();
|
| + instr->set_is_32_bit(true);
|
| + } else if (mintop->IsUnboxInteger()) {
|
| + UnboxIntegerInstr* instr = mintop->AsUnboxInteger();
|
| + instr->set_is_32_bit(true);
|
| + } else if (mintop->IsUnaryMintOp()) {
|
| + UnaryMintOpInstr* instr = mintop->AsUnaryMintOp();
|
| + instr->set_is_32_bit(true);
|
| + } else {
|
| + UNREACHABLE();
|
| + }
|
| +
|
| + return true;
|
| +}
|
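The width test above accepts a range that is either the exact mask interval or any subinterval of [0, kMaxUint32]. A simplified stand-in for that check (plain C++; FitsInUint32 is a hypothetical helper, not the VM's Range::IsWithin):

    #include <cassert>
    #include <cstdint>

    // Hypothetical simplification of the range test used above: true when
    // every value in [lo, hi] can be held in an unsigned 32-bit integer.
    static bool FitsInUint32(int64_t lo, int64_t hi) {
      return (lo >= 0) && (hi <= INT64_C(0xFFFFFFFF));
    }

    int main() {
      assert(FitsInUint32(0, INT64_C(0xFFFFFFFF)));   // The full mask interval.
      assert(FitsInUint32(10, 1000));                 // A narrower subinterval.
      assert(!FitsInUint32(-1, 10));                  // Negative lower bound.
      assert(!FitsInUint32(0, INT64_C(1) << 32));     // 2^32 itself is too wide.
      return 0;
    }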
| +
|
| +
|
| void FlowGraphOptimizer::OptimizeLeftShiftBitAndSmiOp(
|
| Definition* bit_and_instr,
|
| Definition* left_instr,
|
| @@ -279,7 +354,9 @@ void FlowGraphOptimizer::OptimizeLeftShiftBitAndSmiOp(
|
| if (!is_positive_or_zero) {
|
| is_positive_or_zero = IsPositiveOrZeroSmiConst(right_instr);
|
| }
|
| - if (!is_positive_or_zero) return;
|
| + if (!is_positive_or_zero) {
|
| + return;
|
| + }
|
|
|
| BinarySmiOpInstr* smi_shift_left = NULL;
|
| if (bit_and_instr->InputAt(0)->IsSingleUse()) {
|
| @@ -288,7 +365,9 @@ void FlowGraphOptimizer::OptimizeLeftShiftBitAndSmiOp(
|
| if ((smi_shift_left == NULL) && (bit_and_instr->InputAt(1)->IsSingleUse())) {
|
| smi_shift_left = AsSmiShiftLeftInstruction(right_instr);
|
| }
|
| - if (smi_shift_left == NULL) return;
|
| + if (smi_shift_left == NULL) {
|
| + return;
|
| + }
|
|
|
| // Pattern recognized.
|
| smi_shift_left->set_is_truncating(true);
|
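For reference, the truncation marked here is safe because the bit-and that consumes the shift discards exactly the high bits a truncating shift could lose. The Smi case deals with tagged signed values, but the underlying bit-level identity can be sketched with plain unsigned C++ (names and constants are illustrative only):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t x = UINT64_C(0x1234567812345678);
      const unsigned shift = 12;
      const uint64_t mask = 0x7FFFF;  // A non-negative mask, as required above.

      // Full-width shift followed by the mask...
      const uint64_t wide = (x << shift) & mask;
      // ...agrees with a shift performed on a truncated operand, because the
      // mask clears every bit the truncation could have affected.
      const uint32_t narrow = static_cast<uint32_t>(x) << shift;
      assert(wide == (narrow & mask));
      return 0;
    }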
| @@ -525,7 +604,6 @@ void FlowGraphOptimizer::TryOptimizePatterns() {
|
| }
|
| }
|
|
|
| -
|
| static void EnsureSSATempIndex(FlowGraph* graph,
|
| Definition* defn,
|
| Definition* replacement) {
|
| @@ -564,6 +642,58 @@ static void ReplaceCurrentInstruction(ForwardInstructionIterator* iterator,
|
| }
|
|
|
|
|
| +void FlowGraphOptimizer::TryRangeDerivedOptimizations() {
|
| + ASSERT(current_iterator_ == NULL);
|
| +
|
| + for (intptr_t i = 0; i < block_order_.length(); ++i) {
|
| + BlockEntryInstr* entry = block_order_[i];
|
| +    // First, constrain the range of the input to a mint mask. Two
|
| +    // patterns are recognized:
|
| +    //   v7 & 0xFFFFFFFF;
|
| +    //   0xFFFFFFFF & v7;
|
| +    // If v7 has only a single use, its range is constrained to 32 bits
|
| +    // and the mask operation is replaced by v7 itself.
|
| + {
|
| + ForwardInstructionIterator it(entry);
|
| +      current_iterator_ = &it;
|
| + for (; !it.Done(); it.Advance()) {
|
| + // Constrain ranges of mint operations whose only use is a mint mask op.
|
| + if (it.Current()->IsBinaryMintOp()) {
|
| + BinaryMintOpInstr* mintop = it.Current()->AsBinaryMintOp();
|
| + if (mintop->op_kind() == Token::kBIT_AND) {
|
| + Definition* masked = OptimizeMint32BitMasks(mintop);
|
| + if (masked != NULL) {
|
| + // Replace mask instruction with masked input.
|
| + ReplaceCurrentInstruction(current_iterator(),
|
| + mintop,
|
| + masked,
|
| + flow_graph());
|
| + }
|
| + }
|
| + }
|
| + }
|
| + current_iterator_ = NULL;
|
| + }
|
| +    // Mark mint instructions as 32-bit when their range shows they are
|
| +    // non-negative and fit in 32 bits.
|
| + {
|
| + ForwardInstructionIterator it(entry);
|
| +      current_iterator_ = &it;
|
| + for (; !it.Done(); it.Advance()) {
|
| + if (!it.Current()->IsDefinition()) {
|
| + continue;
|
| + }
|
| + Definition* def = it.Current()->AsDefinition();
|
| + if (def->IsMintDefinition()) {
|
| + TryMarkMint32Bit(def);
|
| + }
|
| + }
|
| + current_iterator_ = NULL;
|
| + }
|
| + }
|
| +}
|
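The replacement only pays off if the surviving producer chain is later evaluated with truncating 32-bit semantics; the underlying identity is the usual modular-arithmetic one: computing in 64 bits and masking the final result to its low 32 bits gives the same answer as computing every step in truncating 32-bit arithmetic. A self-contained sketch of that identity (plain C++, not VM code; the expression and values are arbitrary):

    #include <cassert>
    #include <cstdint>

    // 64-bit evaluation followed by a 32-bit mask of the final result.
    static uint64_t MaskedWide(uint64_t a, uint64_t b, uint64_t c) {
      return (a * b + c) & UINT64_C(0xFFFFFFFF);
    }

    // The same expression evaluated step by step in truncating 32-bit
    // arithmetic (unsigned overflow wraps modulo 2^32).
    static uint32_t Narrow(uint32_t a, uint32_t b, uint32_t c) {
      return a * b + c;
    }

    int main() {
      const uint64_t a = UINT64_C(0x1234567890);
      const uint64_t b = UINT64_C(0xABCDEF12);
      const uint64_t c = UINT64_C(0xFFFFFFFFFF);
      // The low 32 bits of a sum or product depend only on the low 32 bits
      // of the operands, so both evaluation strategies agree.
      assert(MaskedWide(a, b, c) == Narrow(static_cast<uint32_t>(a),
                                           static_cast<uint32_t>(b),
                                           static_cast<uint32_t>(c)));
      return 0;
    }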
| +
|
| +
|
| bool FlowGraphOptimizer::Canonicalize() {
|
| bool changed = false;
|
| for (intptr_t i = 0; i < block_order_.length(); ++i) {
|
| @@ -611,7 +741,12 @@ void FlowGraphOptimizer::InsertConversion(Representation from,
|
|
|
| } else if ((from == kUnboxedMint) && (to == kTagged)) {
|
| converted = new(I) BoxIntegerInstr(use->CopyWithType());
|
| -
|
| + } else if ((from == kUnboxedMint) && (to == kUnboxedMint32)) {
|
| + converted = new(I) MintConverterInstr(MintConverterInstr::kMintToMint32,
|
| + use->CopyWithType());
|
| + } else if ((from == kUnboxedMint32) && (to == kUnboxedMint)) {
|
| + converted = new(I) MintConverterInstr(MintConverterInstr::kMint32ToMint,
|
| + use->CopyWithType());
|
| } else if (from == kUnboxedMint && to == kUnboxedDouble) {
|
| ASSERT(CanUnboxDouble());
|
| // Convert by boxing/unboxing.
|
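The two new conversion cases presumably bridge between the full 64-bit representation and the new 32-bit one. The patch does not show MintConverterInstr's semantics, so the following is only an assumed model: narrowing keeps the low 32 bits and widening zero-extends, which is consistent with the [0, kMaxUint32] ranges established earlier (plain C++ sketch; the function names are hypothetical):

    #include <cassert>
    #include <cstdint>

    // Assumed semantics, not taken from the patch: kMintToMint32 keeps the
    // low 32 bits, kMint32ToMint zero-extends back to 64 bits.
    static uint32_t MintToMint32(int64_t v) {
      return static_cast<uint32_t>(v & INT64_C(0xFFFFFFFF));
    }

    static int64_t Mint32ToMint(uint32_t v) {
      return static_cast<int64_t>(v);  // Zero-extension; always non-negative.
    }

    int main() {
      // Values already inside [0, 0xFFFFFFFF] -- the situation the range
      // checks above establish -- round-trip without loss.
      const int64_t in_range = INT64_C(0xCAFEBABE);
      assert(Mint32ToMint(MintToMint32(in_range)) == in_range);

      // Values outside that interval are truncated by the narrowing direction.
      const int64_t wide = INT64_C(0x1CAFEBABE);
      assert(Mint32ToMint(MintToMint32(wide)) != wide);
      return 0;
    }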
| @@ -8514,6 +8649,11 @@ void ConstantPropagator::VisitBinaryMintOp(BinaryMintOpInstr* instr) {
|
| }
|
|
|
|
|
| +void ConstantPropagator::VisitMintConverter(MintConverterInstr* instr) {
|
| + SetValue(instr, non_constant_);
|
| +}
|
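Returning non_constant_ is the conservative lattice choice: the propagator simply refuses to fold through the converter, which can never produce a wrong constant, only a missed one. A much-simplified stand-in for that lattice (plain C++; the VM's actual ConstantPropagator lattice and join rules are not shown in this patch):

    #include <cassert>
    #include <cstdint>

    // Simplified three-level constant lattice: not yet visited, a known
    // constant, or definitely non-constant.
    struct LatticeValue {
      enum Kind { kUnknown, kConstant, kNonConstant } kind;
      int64_t constant;
    };

    static LatticeValue Join(const LatticeValue& a, const LatticeValue& b) {
      if (a.kind == LatticeValue::kUnknown) return b;
      if (b.kind == LatticeValue::kUnknown) return a;
      if ((a.kind == LatticeValue::kConstant) &&
          (b.kind == LatticeValue::kConstant) && (a.constant == b.constant)) {
        return a;
      }
      return LatticeValue{LatticeValue::kNonConstant, 0};
    }

    int main() {
      const LatticeValue five{LatticeValue::kConstant, 5};
      const LatticeValue six{LatticeValue::kConstant, 6};
      const LatticeValue bottom{LatticeValue::kNonConstant, 0};
      // Anything joined with non-constant, or two disagreeing constants,
      // collapses to non-constant -- the safe default the visitor picks.
      assert(Join(five, bottom).kind == LatticeValue::kNonConstant);
      assert(Join(five, six).kind == LatticeValue::kNonConstant);
      assert(Join(five, five).kind == LatticeValue::kConstant);
      return 0;
    }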
| +
|
| +
|
| void ConstantPropagator::VisitShiftMintOp(ShiftMintOpInstr* instr) {
|
| HandleBinaryOp(instr, instr->op_kind(), *instr->left(), *instr->right());
|
| }
|
|
|