| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/flow_graph_range_analysis.h" | 5 #include "vm/flow_graph_range_analysis.h" |
| 6 | 6 |
| 7 #include "vm/bit_vector.h" | 7 #include "vm/bit_vector.h" |
| 8 #include "vm/il_printer.h" | 8 #include "vm/il_printer.h" |
| 9 | 9 |
| 10 namespace dart { | 10 namespace dart { |
| 11 | 11 |
| 12 DEFINE_FLAG(bool, array_bounds_check_elimination, true, | 12 DEFINE_FLAG(bool, |
| 13 "Eliminate redundant bounds checks."); | 13 array_bounds_check_elimination, |
| 14 true, |
| 15 "Eliminate redundant bounds checks."); |
| 14 DEFINE_FLAG(bool, trace_range_analysis, false, "Trace range analysis progress"); | 16 DEFINE_FLAG(bool, trace_range_analysis, false, "Trace range analysis progress"); |
| 15 DEFINE_FLAG(bool, trace_integer_ir_selection, false, | 17 DEFINE_FLAG(bool, |
| 16 "Print integer IR selection optimization pass."); | 18 trace_integer_ir_selection, |
| 19 false, |
| 20 "Print integer IR selection optimization pass."); |
| 17 DECLARE_FLAG(bool, trace_constant_propagation); | 21 DECLARE_FLAG(bool, trace_constant_propagation); |
| 18 | 22 |
| 19 // Quick access to the locally defined isolate() and zone() methods. | 23 // Quick access to the locally defined isolate() and zone() methods. |
| 20 #define I (isolate()) | 24 #define I (isolate()) |
| 21 #define Z (zone()) | 25 #define Z (zone()) |
| 22 | 26 |
| 23 void RangeAnalysis::Analyze() { | 27 void RangeAnalysis::Analyze() { |
| 24 CollectValues(); | 28 CollectValues(); |
| 25 InsertConstraints(); | 29 InsertConstraints(); |
| 26 DiscoverSimpleInductionVariables(); | 30 DiscoverSimpleInductionVariables(); |
| (...skipping 47 matching lines...) |
| 74 class InductionVariableInfo : public ZoneAllocated { | 78 class InductionVariableInfo : public ZoneAllocated { |
| 75 public: | 79 public: |
| 76 InductionVariableInfo(PhiInstr* phi, | 80 InductionVariableInfo(PhiInstr* phi, |
| 77 Definition* initial_value, | 81 Definition* initial_value, |
| 78 BinarySmiOpInstr* increment, | 82 BinarySmiOpInstr* increment, |
| 79 ConstraintInstr* limit) | 83 ConstraintInstr* limit) |
| 80 : phi_(phi), | 84 : phi_(phi), |
| 81 initial_value_(initial_value), | 85 initial_value_(initial_value), |
| 82 increment_(increment), | 86 increment_(increment), |
| 83 limit_(limit), | 87 limit_(limit), |
| 84 bound_(NULL) { } | 88 bound_(NULL) {} |
| 85 | 89 |
| 86 PhiInstr* phi() const { return phi_; } | 90 PhiInstr* phi() const { return phi_; } |
| 87 Definition* initial_value() const { return initial_value_; } | 91 Definition* initial_value() const { return initial_value_; } |
| 88 BinarySmiOpInstr* increment() const { return increment_; } | 92 BinarySmiOpInstr* increment() const { return increment_; } |
| 89 | 93 |
| 90 // Outermost constraint that constrains this induction variable into | 94 // Outermost constraint that constrains this induction variable into |
| 91 // [-inf, X] range. | 95 // [-inf, X] range. |
| 92 ConstraintInstr* limit() const { return limit_; } | 96 ConstraintInstr* limit() const { return limit_; } |
| 93 | 97 |
| 94 // Induction variable from the same join block that has limiting constraint. | 98 // Induction variable from the same join block that has limiting constraint. |
| 95 PhiInstr* bound() const { return bound_; } | 99 PhiInstr* bound() const { return bound_; } |
| 96 void set_bound(PhiInstr* bound) { bound_ = bound; } | 100 void set_bound(PhiInstr* bound) { bound_ = bound; } |
| 97 | 101 |
| 98 private: | 102 private: |
| 99 PhiInstr* phi_; | 103 PhiInstr* phi_; |
| 100 Definition* initial_value_; | 104 Definition* initial_value_; |
| 101 BinarySmiOpInstr* increment_; | 105 BinarySmiOpInstr* increment_; |
| 102 ConstraintInstr* limit_; | 106 ConstraintInstr* limit_; |
| 103 | 107 |
| 104 PhiInstr* bound_; | 108 PhiInstr* bound_; |
| 105 }; | 109 }; |
| 106 | 110 |
| 107 | 111 |
| 108 static ConstraintInstr* FindBoundingConstraint(PhiInstr* phi, | 112 static ConstraintInstr* FindBoundingConstraint(PhiInstr* phi, |
| 109 Definition* defn) { | 113 Definition* defn) { |
| 110 ConstraintInstr* limit = NULL; | 114 ConstraintInstr* limit = NULL; |
| 111 for (ConstraintInstr* constraint = defn->AsConstraint(); | 115 for (ConstraintInstr* constraint = defn->AsConstraint(); constraint != NULL; |
| 112 constraint != NULL; | |
| 113 constraint = constraint->value()->definition()->AsConstraint()) { | 116 constraint = constraint->value()->definition()->AsConstraint()) { |
| 114 if (constraint->target() == NULL) { | 117 if (constraint->target() == NULL) { |
| 115 continue; // Only interested in non-artificial constraints. | 118 continue; // Only interested in non-artificial constraints. |
| 116 } | 119 } |
| 117 | 120 |
| 118 Range* constraining_range = constraint->constraint(); | 121 Range* constraining_range = constraint->constraint(); |
| 119 if (constraining_range->min().Equals(RangeBoundary::MinSmi()) && | 122 if (constraining_range->min().Equals(RangeBoundary::MinSmi()) && |
| 120 (constraining_range->max().IsSymbol() && | 123 (constraining_range->max().IsSymbol() && |
| 121 phi->IsDominatedBy(constraining_range->max().symbol()))) { | 124 phi->IsDominatedBy(constraining_range->max().symbol()))) { |
| 122 limit = constraint; | 125 limit = constraint; |
| (...skipping 10 matching lines...) |
| 133 } | 136 } |
| 134 | 137 |
| 135 if (phi->InputCount() != 2) { | 138 if (phi->InputCount() != 2) { |
| 136 return NULL; | 139 return NULL; |
| 137 } | 140 } |
| 138 | 141 |
| 139 BitVector* loop_info = phi->block()->loop_info(); | 142 BitVector* loop_info = phi->block()->loop_info(); |
| 140 | 143 |
| 141 const intptr_t backedge_idx = | 144 const intptr_t backedge_idx = |
| 142 loop_info->Contains(phi->block()->PredecessorAt(0)->preorder_number()) | 145 loop_info->Contains(phi->block()->PredecessorAt(0)->preorder_number()) |
| 143 ? 0 : 1; | 146 ? 0 |
| 147 : 1; |
| 144 | 148 |
| 145 Definition* initial_value = | 149 Definition* initial_value = phi->InputAt(1 - backedge_idx)->definition(); |
| 146 phi->InputAt(1 - backedge_idx)->definition(); | |
| 147 | 150 |
| 148 BinarySmiOpInstr* increment = | 151 BinarySmiOpInstr* increment = |
| 149 UnwrapConstraint(phi->InputAt(backedge_idx)->definition())-> | 152 UnwrapConstraint(phi->InputAt(backedge_idx)->definition()) |
| 150 AsBinarySmiOp(); | 153 ->AsBinarySmiOp(); |
| 151 | 154 |
| 152 if ((increment != NULL) && | 155 if ((increment != NULL) && (increment->op_kind() == Token::kADD) && |
| 153 (increment->op_kind() == Token::kADD) && | |
| 154 (UnwrapConstraint(increment->left()->definition()) == phi) && | 156 (UnwrapConstraint(increment->left()->definition()) == phi) && |
| 155 increment->right()->BindsToConstant() && | 157 increment->right()->BindsToConstant() && |
| 156 increment->right()->BoundConstant().IsSmi() && | 158 increment->right()->BoundConstant().IsSmi() && |
| 157 (Smi::Cast(increment->right()->BoundConstant()).Value() == 1)) { | 159 (Smi::Cast(increment->right()->BoundConstant()).Value() == 1)) { |
| 158 return new InductionVariableInfo( | 160 return new InductionVariableInfo( |
| 159 phi, | 161 phi, initial_value, increment, |
| 160 initial_value, | |
| 161 increment, | |
| 162 FindBoundingConstraint(phi, increment->left()->definition())); | 162 FindBoundingConstraint(phi, increment->left()->definition())); |
| 163 } | 163 } |
| 164 | 164 |
| 165 return NULL; | 165 return NULL; |
| 166 } | 166 } |
| 167 | 167 |
| 168 | 168 |
| 169 void RangeAnalysis::DiscoverSimpleInductionVariables() { | 169 void RangeAnalysis::DiscoverSimpleInductionVariables() { |
| 170 GrowableArray<InductionVariableInfo*> loop_variables; | 170 GrowableArray<InductionVariableInfo*> loop_variables; |
| 171 | 171 |
| 172 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); | 172 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); |
| 173 !block_it.Done(); | 173 !block_it.Done(); block_it.Advance()) { |
| 174 block_it.Advance()) { | |
| 175 BlockEntryInstr* block = block_it.Current(); | 174 BlockEntryInstr* block = block_it.Current(); |
| 176 | 175 |
| 177 JoinEntryInstr* join = block->AsJoinEntry(); | 176 JoinEntryInstr* join = block->AsJoinEntry(); |
| 178 if (join != NULL && join->loop_info() != NULL) { | 177 if (join != NULL && join->loop_info() != NULL) { |
| 179 loop_variables.Clear(); | 178 loop_variables.Clear(); |
| 180 | 179 |
| 181 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) { | 180 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) { |
| 182 PhiInstr* current = phi_it.Current(); | 181 PhiInstr* current = phi_it.Current(); |
| 183 | 182 |
| 184 InductionVariableInfo* info = DetectSimpleInductionVariable(current); | 183 InductionVariableInfo* info = DetectSimpleInductionVariable(current); |
| 185 if (info != NULL) { | 184 if (info != NULL) { |
| 186 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 185 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 187 THR_Print("Simple loop variable: %s bound <%s>\n", | 186 THR_Print("Simple loop variable: %s bound <%s>\n", |
| 188 current->ToCString(), | 187 current->ToCString(), |
| 189 info->limit() != NULL ? | 188 info->limit() != NULL ? info->limit()->ToCString() : "?"); |
| 190 info->limit()->ToCString() : "?"); | |
| 191 } | 189 } |
| 192 | 190 |
| 193 loop_variables.Add(info); | 191 loop_variables.Add(info); |
| 194 } | 192 } |
| 195 } | 193 } |
| 196 } | 194 } |
| 197 | 195 |
| 198 InductionVariableInfo* bound = NULL; | 196 InductionVariableInfo* bound = NULL; |
| 199 for (intptr_t i = 0; i < loop_variables.length(); i++) { | 197 for (intptr_t i = 0; i < loop_variables.length(); i++) { |
| 200 if (loop_variables[i]->limit() != NULL) { | 198 if (loop_variables[i]->limit() != NULL) { |
| (...skipping 17 matching lines...) |
| 218 const GrowableArray<Definition*>& initial = | 216 const GrowableArray<Definition*>& initial = |
| 219 *flow_graph_->graph_entry()->initial_definitions(); | 217 *flow_graph_->graph_entry()->initial_definitions(); |
| 220 for (intptr_t i = 0; i < initial.length(); ++i) { | 218 for (intptr_t i = 0; i < initial.length(); ++i) { |
| 221 Definition* current = initial[i]; | 219 Definition* current = initial[i]; |
| 222 if (IsIntegerDefinition(current)) { | 220 if (IsIntegerDefinition(current)) { |
| 223 values_.Add(current); | 221 values_.Add(current); |
| 224 } | 222 } |
| 225 } | 223 } |
| 226 | 224 |
| 227 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); | 225 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); |
| 228 !block_it.Done(); | 226 !block_it.Done(); block_it.Advance()) { |
| 229 block_it.Advance()) { | |
| 230 BlockEntryInstr* block = block_it.Current(); | 227 BlockEntryInstr* block = block_it.Current(); |
| 231 | 228 |
| 232 | 229 |
| 233 if (block->IsGraphEntry() || block->IsCatchBlockEntry()) { | 230 if (block->IsGraphEntry() || block->IsCatchBlockEntry()) { |
| 234 const GrowableArray<Definition*>& initial = block->IsGraphEntry() | 231 const GrowableArray<Definition*>& initial = |
| 235 ? *block->AsGraphEntry()->initial_definitions() | 232 block->IsGraphEntry() |
| 236 : *block->AsCatchBlockEntry()->initial_definitions(); | 233 ? *block->AsGraphEntry()->initial_definitions() |
| 234 : *block->AsCatchBlockEntry()->initial_definitions(); |
| 237 for (intptr_t i = 0; i < initial.length(); ++i) { | 235 for (intptr_t i = 0; i < initial.length(); ++i) { |
| 238 Definition* current = initial[i]; | 236 Definition* current = initial[i]; |
| 239 if (IsIntegerDefinition(current)) { | 237 if (IsIntegerDefinition(current)) { |
| 240 values_.Add(current); | 238 values_.Add(current); |
| 241 } | 239 } |
| 242 } | 240 } |
| 243 } | 241 } |
| 244 | 242 |
| 245 JoinEntryInstr* join = block->AsJoinEntry(); | 243 JoinEntryInstr* join = block->AsJoinEntry(); |
| 246 if (join != NULL) { | 244 if (join != NULL) { |
| 247 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) { | 245 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) { |
| 248 PhiInstr* current = phi_it.Current(); | 246 PhiInstr* current = phi_it.Current(); |
| 249 if (current->Type()->IsInt()) { | 247 if (current->Type()->IsInt()) { |
| 250 values_.Add(current); | 248 values_.Add(current); |
| 251 } | 249 } |
| 252 } | 250 } |
| 253 } | 251 } |
| 254 | 252 |
| 255 for (ForwardInstructionIterator instr_it(block); | 253 for (ForwardInstructionIterator instr_it(block); !instr_it.Done(); |
| 256 !instr_it.Done(); | |
| 257 instr_it.Advance()) { | 254 instr_it.Advance()) { |
| 258 Instruction* current = instr_it.Current(); | 255 Instruction* current = instr_it.Current(); |
| 259 Definition* defn = current->AsDefinition(); | 256 Definition* defn = current->AsDefinition(); |
| 260 if (defn != NULL) { | 257 if (defn != NULL) { |
| 261 if (defn->HasSSATemp() && IsIntegerDefinition(defn)) { | 258 if (defn->HasSSATemp() && IsIntegerDefinition(defn)) { |
| 262 values_.Add(defn); | 259 values_.Add(defn); |
| 263 if (defn->IsBinaryMintOp()) { | 260 if (defn->IsBinaryMintOp()) { |
| 264 binary_mint_ops_.Add(defn->AsBinaryMintOp()); | 261 binary_mint_ops_.Add(defn->AsBinaryMintOp()); |
| 265 } else if (defn->IsShiftMintOp()) { | 262 } else if (defn->IsShiftMintOp()) { |
| 266 shift_mint_ops_.Add(defn->AsShiftMintOp()); | 263 shift_mint_ops_.Add(defn->AsShiftMintOp()); |
| (...skipping 31 matching lines...) |
| 298 return false; | 295 return false; |
| 299 } | 296 } |
| 300 | 297 |
| 301 return dom_block->Dominates(use_block); | 298 return dom_block->Dominates(use_block); |
| 302 } | 299 } |
| 303 | 300 |
| 304 | 301 |
| 305 void RangeAnalysis::RenameDominatedUses(Definition* def, | 302 void RangeAnalysis::RenameDominatedUses(Definition* def, |
| 306 Instruction* dom, | 303 Instruction* dom, |
| 307 Definition* other) { | 304 Definition* other) { |
| 308 for (Value::Iterator it(def->input_use_list()); | 305 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) { |
| 309 !it.Done(); | |
| 310 it.Advance()) { | |
| 311 Value* use = it.Current(); | 306 Value* use = it.Current(); |
| 312 | 307 |
| 313 // Skip dead phis. | 308 // Skip dead phis. |
| 314 PhiInstr* phi = use->instruction()->AsPhi(); | 309 PhiInstr* phi = use->instruction()->AsPhi(); |
| 315 ASSERT((phi == NULL) || phi->is_alive()); | 310 ASSERT((phi == NULL) || phi->is_alive()); |
| 316 if (IsDominatedUse(dom, use)) { | 311 if (IsDominatedUse(dom, use)) { |
| 317 use->BindTo(other); | 312 use->BindTo(other); |
| 318 } | 313 } |
| 319 } | 314 } |
| 320 } | 315 } |
| 321 | 316 |
| 322 | 317 |
| 323 // For a comparison operation return an operation for the equivalent flipped | 318 // For a comparison operation return an operation for the equivalent flipped |
| 324 // comparison: a (op) b === b (op') a. | 319 // comparison: a (op) b === b (op') a. |
| 325 static Token::Kind FlipComparison(Token::Kind op) { | 320 static Token::Kind FlipComparison(Token::Kind op) { |
| 326 switch (op) { | 321 switch (op) { |
| 327 case Token::kEQ: return Token::kEQ; | 322 case Token::kEQ: |
| 328 case Token::kNE: return Token::kNE; | 323 return Token::kEQ; |
| 329 case Token::kLT: return Token::kGT; | 324 case Token::kNE: |
| 330 case Token::kGT: return Token::kLT; | 325 return Token::kNE; |
| 331 case Token::kLTE: return Token::kGTE; | 326 case Token::kLT: |
| 332 case Token::kGTE: return Token::kLTE; | 327 return Token::kGT; |
| 328 case Token::kGT: |
| 329 return Token::kLT; |
| 330 case Token::kLTE: |
| 331 return Token::kGTE; |
| 332 case Token::kGTE: |
| 333 return Token::kLTE; |
| 333 default: | 334 default: |
| 334 UNREACHABLE(); | 335 UNREACHABLE(); |
| 335 return Token::kILLEGAL; | 336 return Token::kILLEGAL; |
| 336 } | 337 } |
| 337 } | 338 } |
| 338 | 339 |
| 339 | 340 |
| 340 // Given a boundary (right operand) and a comparison operation, return | 341 // Given a boundary (right operand) and a comparison operation, return |
| 341 // a symbolic range constraint for the left operand of the comparison, assuming | 342 // a symbolic range constraint for the left operand of the comparison, assuming |
| 342 // that it evaluated to true. | 343 // that it evaluated to true. |
| 343 // For example, for the comparison a < b, symbol a is constrained with the range | 344 // For example, for the comparison a < b, symbol a is constrained with the range |
| 344 // [Smi::kMinValue, b - 1]. | 345 // [Smi::kMinValue, b - 1]. |
| 345 Range* RangeAnalysis::ConstraintSmiRange(Token::Kind op, Definition* boundary) { | 346 Range* RangeAnalysis::ConstraintSmiRange(Token::Kind op, Definition* boundary) { |
| 346 switch (op) { | 347 switch (op) { |
| 347 case Token::kEQ: | 348 case Token::kEQ: |
| 348 return new(Z) Range(RangeBoundary::FromDefinition(boundary), | 349 return new (Z) Range(RangeBoundary::FromDefinition(boundary), |
| 349 RangeBoundary::FromDefinition(boundary)); | 350 RangeBoundary::FromDefinition(boundary)); |
| 350 case Token::kNE: | 351 case Token::kNE: |
| 351 return new(Z) Range(Range::Full(RangeBoundary::kRangeBoundarySmi)); | 352 return new (Z) Range(Range::Full(RangeBoundary::kRangeBoundarySmi)); |
| 352 case Token::kLT: | 353 case Token::kLT: |
| 353 return new(Z) Range(RangeBoundary::MinSmi(), | 354 return new (Z) Range(RangeBoundary::MinSmi(), |
| 354 RangeBoundary::FromDefinition(boundary, -1)); | 355 RangeBoundary::FromDefinition(boundary, -1)); |
| 355 case Token::kGT: | 356 case Token::kGT: |
| 356 return new(Z) Range(RangeBoundary::FromDefinition(boundary, 1), | 357 return new (Z) Range(RangeBoundary::FromDefinition(boundary, 1), |
| 357 RangeBoundary::MaxSmi()); | 358 RangeBoundary::MaxSmi()); |
| 358 case Token::kLTE: | 359 case Token::kLTE: |
| 359 return new(Z) Range(RangeBoundary::MinSmi(), | 360 return new (Z) Range(RangeBoundary::MinSmi(), |
| 360 RangeBoundary::FromDefinition(boundary)); | 361 RangeBoundary::FromDefinition(boundary)); |
| 361 case Token::kGTE: | 362 case Token::kGTE: |
| 362 return new(Z) Range(RangeBoundary::FromDefinition(boundary), | 363 return new (Z) Range(RangeBoundary::FromDefinition(boundary), |
| 363 RangeBoundary::MaxSmi()); | 364 RangeBoundary::MaxSmi()); |
| 364 default: | 365 default: |
| 365 UNREACHABLE(); | 366 UNREACHABLE(); |
| 366 return NULL; | 367 return NULL; |
| 367 } | 368 } |
| 368 } | 369 } |
| 369 | 370 |
| 370 | 371 |
| 371 ConstraintInstr* RangeAnalysis::InsertConstraintFor(Value* use, | 372 ConstraintInstr* RangeAnalysis::InsertConstraintFor(Value* use, |
| 372 Definition* defn, | 373 Definition* defn, |
| 373 Range* constraint_range, | 374 Range* constraint_range, |
| 374 Instruction* after) { | 375 Instruction* after) { |
| 375 // No need to constrain constants. | 376 // No need to constrain constants. |
| 376 if (defn->IsConstant()) return NULL; | 377 if (defn->IsConstant()) return NULL; |
| 377 | 378 |
| 378 // Check if the value is already constrained to avoid inserting duplicated | 379 // Check if the value is already constrained to avoid inserting duplicated |
| 379 // constraints. | 380 // constraints. |
| 380 ConstraintInstr* constraint = after->next()->AsConstraint(); | 381 ConstraintInstr* constraint = after->next()->AsConstraint(); |
| 381 while (constraint != NULL) { | 382 while (constraint != NULL) { |
| 382 if ((constraint->value()->definition() == defn) && | 383 if ((constraint->value()->definition() == defn) && |
| 383 constraint->constraint()->Equals(constraint_range)) { | 384 constraint->constraint()->Equals(constraint_range)) { |
| 384 return NULL; | 385 return NULL; |
| 385 } | 386 } |
| 386 constraint = constraint->next()->AsConstraint(); | 387 constraint = constraint->next()->AsConstraint(); |
| 387 } | 388 } |
| 388 | 389 |
| 389 constraint = new(Z) ConstraintInstr( | 390 constraint = new (Z) ConstraintInstr(use->CopyWithType(), constraint_range); |
| 390 use->CopyWithType(), constraint_range); | |
| 391 | 391 |
| 392 flow_graph_->InsertAfter(after, constraint, NULL, FlowGraph::kValue); | 392 flow_graph_->InsertAfter(after, constraint, NULL, FlowGraph::kValue); |
| 393 RenameDominatedUses(defn, constraint, constraint); | 393 RenameDominatedUses(defn, constraint, constraint); |
| 394 constraints_.Add(constraint); | 394 constraints_.Add(constraint); |
| 395 return constraint; | 395 return constraint; |
| 396 } | 396 } |
| 397 | 397 |
| 398 | 398 |
| 399 bool RangeAnalysis::ConstrainValueAfterBranch(Value* use, Definition* defn) { | 399 bool RangeAnalysis::ConstrainValueAfterBranch(Value* use, Definition* defn) { |
| 400 BranchInstr* branch = use->instruction()->AsBranch(); | 400 BranchInstr* branch = use->instruction()->AsBranch(); |
| 401 RelationalOpInstr* rel_op = branch->comparison()->AsRelationalOp(); | 401 RelationalOpInstr* rel_op = branch->comparison()->AsRelationalOp(); |
| 402 if ((rel_op != NULL) && (rel_op->operation_cid() == kSmiCid)) { | 402 if ((rel_op != NULL) && (rel_op->operation_cid() == kSmiCid)) { |
| 403 // Found comparison of two smis. Constrain defn at true and false | 403 // Found comparison of two smis. Constrain defn at true and false |
| 404 // successors using the other operand as a boundary. | 404 // successors using the other operand as a boundary. |
| 405 Definition* boundary; | 405 Definition* boundary; |
| 406 Token::Kind op_kind; | 406 Token::Kind op_kind; |
| 407 if (use->use_index() == 0) { // Left operand. | 407 if (use->use_index() == 0) { // Left operand. |
| 408 boundary = rel_op->InputAt(1)->definition(); | 408 boundary = rel_op->InputAt(1)->definition(); |
| 409 op_kind = rel_op->kind(); | 409 op_kind = rel_op->kind(); |
| 410 } else { | 410 } else { |
| 411 ASSERT(use->use_index() == 1); // Right operand. | 411 ASSERT(use->use_index() == 1); // Right operand. |
| 412 boundary = rel_op->InputAt(0)->definition(); | 412 boundary = rel_op->InputAt(0)->definition(); |
| 413 // InsertConstraintFor assumes that defn is the left operand of a | 413 // InsertConstraintFor assumes that defn is the left operand of a |
| 414 // comparison; if it is the right operand, flip the comparison. | 414 // comparison; if it is the right operand, flip the comparison. |
| 415 op_kind = FlipComparison(rel_op->kind()); | 415 op_kind = FlipComparison(rel_op->kind()); |
| 416 } | 416 } |
| 417 | 417 |
| 418 // Constrain definition at the true successor. | 418 // Constrain definition at the true successor. |
| 419 ConstraintInstr* true_constraint = | 419 ConstraintInstr* true_constraint = |
| 420 InsertConstraintFor(use, | 420 InsertConstraintFor(use, defn, ConstraintSmiRange(op_kind, boundary), |
| 421 defn, | |
| 422 ConstraintSmiRange(op_kind, boundary), | |
| 423 branch->true_successor()); | 421 branch->true_successor()); |
| 424 if (true_constraint != NULL) { | 422 if (true_constraint != NULL) { |
| 425 true_constraint->set_target(branch->true_successor()); | 423 true_constraint->set_target(branch->true_successor()); |
| 426 } | 424 } |
| 427 | 425 |
| 428 // Constrain definition with a negated condition at the false successor. | 426 // Constrain definition with a negated condition at the false successor. |
| 429 ConstraintInstr* false_constraint = | 427 ConstraintInstr* false_constraint = InsertConstraintFor( |
| 430 InsertConstraintFor( | 428 use, defn, |
| 431 use, | 429 ConstraintSmiRange(Token::NegateComparison(op_kind), boundary), |
| 432 defn, | 430 branch->false_successor()); |
| 433 ConstraintSmiRange(Token::NegateComparison(op_kind), boundary), | |
| 434 branch->false_successor()); | |
| 435 if (false_constraint != NULL) { | 431 if (false_constraint != NULL) { |
| 436 false_constraint->set_target(branch->false_successor()); | 432 false_constraint->set_target(branch->false_successor()); |
| 437 } | 433 } |
| 438 | 434 |
| 439 return true; | 435 return true; |
| 440 } | 436 } |
| 441 | 437 |
| 442 return false; | 438 return false; |
| 443 } | 439 } |
| 444 | 440 |
| 445 | 441 |
| 446 void RangeAnalysis::InsertConstraintsFor(Definition* defn) { | 442 void RangeAnalysis::InsertConstraintsFor(Definition* defn) { |
| 447 for (Value* use = defn->input_use_list(); | 443 for (Value* use = defn->input_use_list(); use != NULL; |
| 448 use != NULL; | |
| 449 use = use->next_use()) { | 444 use = use->next_use()) { |
| 450 if (use->instruction()->IsBranch()) { | 445 if (use->instruction()->IsBranch()) { |
| 451 if (ConstrainValueAfterBranch(use, defn)) { | 446 if (ConstrainValueAfterBranch(use, defn)) { |
| 452 Value* other_value = use->instruction()->InputAt(1 - use->use_index()); | 447 Value* other_value = use->instruction()->InputAt(1 - use->use_index()); |
| 453 if (!IsIntegerDefinition(other_value->definition())) { | 448 if (!IsIntegerDefinition(other_value->definition())) { |
| 454 ConstrainValueAfterBranch(other_value, other_value->definition()); | 449 ConstrainValueAfterBranch(other_value, other_value->definition()); |
| 455 } | 450 } |
| 456 } | 451 } |
| 457 } else if (use->instruction()->IsCheckArrayBound()) { | 452 } else if (use->instruction()->IsCheckArrayBound()) { |
| 458 ConstrainValueAfterCheckArrayBound(use, defn); | 453 ConstrainValueAfterCheckArrayBound(use, defn); |
| 459 } | 454 } |
| 460 } | 455 } |
| 461 } | 456 } |
| 462 | 457 |
| 463 | 458 |
| 464 void RangeAnalysis::ConstrainValueAfterCheckArrayBound( | 459 void RangeAnalysis::ConstrainValueAfterCheckArrayBound(Value* use, |
| 465 Value* use, | 460 Definition* defn) { |
| 466 Definition* defn) { | |
| 467 CheckArrayBoundInstr* check = use->instruction()->AsCheckArrayBound(); | 461 CheckArrayBoundInstr* check = use->instruction()->AsCheckArrayBound(); |
| 468 intptr_t use_index = use->use_index(); | 462 intptr_t use_index = use->use_index(); |
| 469 | 463 |
| 470 Range* constraint_range = NULL; | 464 Range* constraint_range = NULL; |
| 471 if (use_index == CheckArrayBoundInstr::kIndexPos) { | 465 if (use_index == CheckArrayBoundInstr::kIndexPos) { |
| 472 Definition* length = check->length()->definition(); | 466 Definition* length = check->length()->definition(); |
| 473 constraint_range = new(Z) Range( | 467 constraint_range = new (Z) Range(RangeBoundary::FromConstant(0), |
| 474 RangeBoundary::FromConstant(0), | 468 RangeBoundary::FromDefinition(length, -1)); |
| 475 RangeBoundary::FromDefinition(length, -1)); | |
| 476 } else { | 469 } else { |
| 477 ASSERT(use_index == CheckArrayBoundInstr::kLengthPos); | 470 ASSERT(use_index == CheckArrayBoundInstr::kLengthPos); |
| 478 Definition* index = check->index()->definition(); | 471 Definition* index = check->index()->definition(); |
| 479 constraint_range = new(Z) Range( | 472 constraint_range = new (Z) |
| 480 RangeBoundary::FromDefinition(index, 1), | 473 Range(RangeBoundary::FromDefinition(index, 1), RangeBoundary::MaxSmi()); |
| 481 RangeBoundary::MaxSmi()); | |
| 482 } | 474 } |
| 483 InsertConstraintFor(use, defn, constraint_range, check); | 475 InsertConstraintFor(use, defn, constraint_range, check); |
| 484 } | 476 } |
| 485 | 477 |
| 486 | 478 |
| 487 void RangeAnalysis::InsertConstraints() { | 479 void RangeAnalysis::InsertConstraints() { |
| 488 for (intptr_t i = 0; i < values_.length(); i++) { | 480 for (intptr_t i = 0; i < values_.length(); i++) { |
| 489 InsertConstraintsFor(values_[i]); | 481 InsertConstraintsFor(values_[i]); |
| 490 } | 482 } |
| 491 | 483 |
| (...skipping 44 matching lines...) |
| 536 } | 528 } |
| 537 | 529 |
| 538 return range; | 530 return range; |
| 539 } | 531 } |
| 540 | 532 |
| 541 | 533 |
| 542 static bool AreEqualDefinitions(Definition* a, Definition* b) { | 534 static bool AreEqualDefinitions(Definition* a, Definition* b) { |
| 543 a = UnwrapConstraint(a); | 535 a = UnwrapConstraint(a); |
| 544 b = UnwrapConstraint(b); | 536 b = UnwrapConstraint(b); |
| 545 return (a == b) || | 537 return (a == b) || |
| 546 (a->AllowsCSE() && | 538 (a->AllowsCSE() && a->Dependencies().IsNone() && b->AllowsCSE() && |
| 547 a->Dependencies().IsNone() && | 539 b->Dependencies().IsNone() && a->Equals(b)); |
| 548 b->AllowsCSE() && | |
| 549 b->Dependencies().IsNone() && | |
| 550 a->Equals(b)); | |
| 551 } | 540 } |
| 552 | 541 |
| 553 | 542 |
| 554 static bool DependOnSameSymbol(const RangeBoundary& a, const RangeBoundary& b) { | 543 static bool DependOnSameSymbol(const RangeBoundary& a, const RangeBoundary& b) { |
| 555 return a.IsSymbol() && b.IsSymbol() && | 544 return a.IsSymbol() && b.IsSymbol() && |
| 556 AreEqualDefinitions(a.symbol(), b.symbol()); | 545 AreEqualDefinitions(a.symbol(), b.symbol()); |
| 557 } | 546 } |
| 558 | 547 |
| 559 | 548 |
| 560 // Given the current range of a phi and a newly computed range, check | 549 // Given the current range of a phi and a newly computed range, check |
| 561 // whether it is growing towards negative infinity; if it is, widen it to | 550 // whether it is growing towards negative infinity; if it is, widen it to |
| 562 // MinSmi. | 551 // MinSmi. |
| 563 static RangeBoundary WidenMin(const Range* range, | 552 static RangeBoundary WidenMin(const Range* range, |
| 564 const Range* new_range, | 553 const Range* new_range, |
| 565 RangeBoundary::RangeSize size) { | 554 RangeBoundary::RangeSize size) { |
| 566 RangeBoundary min = range->min(); | 555 RangeBoundary min = range->min(); |
| 567 RangeBoundary new_min = new_range->min(); | 556 RangeBoundary new_min = new_range->min(); |
| 568 | 557 |
| 569 if (min.IsSymbol()) { | 558 if (min.IsSymbol()) { |
| 570 if (min.LowerBound().Overflowed(size)) { | 559 if (min.LowerBound().Overflowed(size)) { |
| 571 return RangeBoundary::MinConstant(size); | 560 return RangeBoundary::MinConstant(size); |
| 572 } else if (DependOnSameSymbol(min, new_min)) { | 561 } else if (DependOnSameSymbol(min, new_min)) { |
| 573 return min.offset() <= new_min.offset() ? | 562 return min.offset() <= new_min.offset() |
| 574 min : RangeBoundary::MinConstant(size); | 563 ? min |
| 564 : RangeBoundary::MinConstant(size); |
| 575 } else if (min.UpperBound(size) <= new_min.LowerBound(size)) { | 565 } else if (min.UpperBound(size) <= new_min.LowerBound(size)) { |
| 576 return min; | 566 return min; |
| 577 } | 567 } |
| 578 } | 568 } |
| 579 | 569 |
| 580 min = Range::ConstantMin(range, size); | 570 min = Range::ConstantMin(range, size); |
| 581 new_min = Range::ConstantMin(new_range, size); | 571 new_min = Range::ConstantMin(new_range, size); |
| 582 | 572 |
| 583 return (min.ConstantValue() <= new_min.ConstantValue()) ? | 573 return (min.ConstantValue() <= new_min.ConstantValue()) |
| 584 min : RangeBoundary::MinConstant(size); | 574 ? min |
| 575 : RangeBoundary::MinConstant(size); |
| 585 } | 576 } |
| 586 | 577 |
| 587 // Given the current range of a phi and a newly computed range, check | 578 // Given the current range of a phi and a newly computed range, check |
| 588 // whether it is growing towards positive infinity; if it is, widen it to | 579 // whether it is growing towards positive infinity; if it is, widen it to |
| 589 // MaxSmi. | 580 // MaxSmi. |
| 590 static RangeBoundary WidenMax(const Range* range, | 581 static RangeBoundary WidenMax(const Range* range, |
| 591 const Range* new_range, | 582 const Range* new_range, |
| 592 RangeBoundary::RangeSize size) { | 583 RangeBoundary::RangeSize size) { |
| 593 RangeBoundary max = range->max(); | 584 RangeBoundary max = range->max(); |
| 594 RangeBoundary new_max = new_range->max(); | 585 RangeBoundary new_max = new_range->max(); |
| 595 | 586 |
| 596 if (max.IsSymbol()) { | 587 if (max.IsSymbol()) { |
| 597 if (max.UpperBound().Overflowed(size)) { | 588 if (max.UpperBound().Overflowed(size)) { |
| 598 return RangeBoundary::MaxConstant(size); | 589 return RangeBoundary::MaxConstant(size); |
| 599 } else if (DependOnSameSymbol(max, new_max)) { | 590 } else if (DependOnSameSymbol(max, new_max)) { |
| 600 return max.offset() >= new_max.offset() ? | 591 return max.offset() >= new_max.offset() |
| 601 max : RangeBoundary::MaxConstant(size); | 592 ? max |
| 593 : RangeBoundary::MaxConstant(size); |
| 602 } else if (max.LowerBound(size) >= new_max.UpperBound(size)) { | 594 } else if (max.LowerBound(size) >= new_max.UpperBound(size)) { |
| 603 return max; | 595 return max; |
| 604 } | 596 } |
| 605 } | 597 } |
| 606 | 598 |
| 607 max = Range::ConstantMax(range, size); | 599 max = Range::ConstantMax(range, size); |
| 608 new_max = Range::ConstantMax(new_range, size); | 600 new_max = Range::ConstantMax(new_range, size); |
| 609 | 601 |
| 610 return (max.ConstantValue() >= new_max.ConstantValue()) ? | 602 return (max.ConstantValue() >= new_max.ConstantValue()) |
| 611 max : RangeBoundary::MaxConstant(size); | 603 ? max |
| 604 : RangeBoundary::MaxConstant(size); |
| 612 } | 605 } |
| 613 | 606 |
| 614 | 607 |
| 615 // Given the current range of a phi and a newly computed range, check | 608 // Given the current range of a phi and a newly computed range, check |
| 616 // whether we can perform narrowing: use the newly computed minimum to | 609 // whether we can perform narrowing: use the newly computed minimum to |
| 617 // improve the precision of the computed range. We do it only if the current | 610 // improve the precision of the computed range. We do it only if the current |
| 618 // minimum was widened and is equal to MinSmi. | 611 // minimum was widened and is equal to MinSmi. |
| 619 // The newly computed minimum is expected to be greater than or equal to the | 612 // The newly computed minimum is expected to be greater than or equal to the |
| 620 // old one, as we are running after the widening phase. | 613 // old one, as we are running after the widening phase. |
| 621 static RangeBoundary NarrowMin(const Range* range, | 614 static RangeBoundary NarrowMin(const Range* range, |
| (...skipping 21 matching lines...) |
| 643 const RangeBoundary new_max = Range::ConstantMax(new_range, size); | 636 const RangeBoundary new_max = Range::ConstantMax(new_range, size); |
| 644 if (max.ConstantValue() < new_max.ConstantValue()) return range->max(); | 637 if (max.ConstantValue() < new_max.ConstantValue()) return range->max(); |
| 645 | 638 |
| 646 // TODO(vegorov): consider using positive infinity to indicate widened bound. | 639 // TODO(vegorov): consider using positive infinity to indicate widened bound. |
| 647 return range->max().IsMaximumOrAbove(size) ? new_range->max() : range->max(); | 640 return range->max().IsMaximumOrAbove(size) ? new_range->max() : range->max(); |
| 648 } | 641 } |
| 649 | 642 |
| 650 | 643 |
| 651 char RangeAnalysis::OpPrefix(JoinOperator op) { | 644 char RangeAnalysis::OpPrefix(JoinOperator op) { |
| 652 switch (op) { | 645 switch (op) { |
| 653 case WIDEN: return 'W'; | 646 case WIDEN: |
| 654 case NARROW: return 'N'; | 647 return 'W'; |
| 655 case NONE: return 'I'; | 648 case NARROW: |
| 649 return 'N'; |
| 650 case NONE: |
| 651 return 'I'; |
| 656 } | 652 } |
| 657 UNREACHABLE(); | 653 UNREACHABLE(); |
| 658 return ' '; | 654 return ' '; |
| 659 } | 655 } |
| 660 | 656 |
| 661 | 657 |
| 662 static RangeBoundary::RangeSize RangeSizeForPhi(Definition* phi) { | 658 static RangeBoundary::RangeSize RangeSizeForPhi(Definition* phi) { |
| 663 ASSERT(phi->IsPhi()); | 659 ASSERT(phi->IsPhi()); |
| 664 if (phi->Type()->ToCid() == kSmiCid) { | 660 if (phi->Type()->ToCid() == kSmiCid) { |
| 665 return RangeBoundary::kRangeBoundarySmi; | 661 return RangeBoundary::kRangeBoundarySmi; |
| (...skipping 22 matching lines...) |
| 688 WidenMax(defn->range(), &range, size)); | 684 WidenMax(defn->range(), &range, size)); |
| 689 } else if (op == NARROW) { | 685 } else if (op == NARROW) { |
| 690 range = Range(NarrowMin(defn->range(), &range, size), | 686 range = Range(NarrowMin(defn->range(), &range, size), |
| 691 NarrowMax(defn->range(), &range, size)); | 687 NarrowMax(defn->range(), &range, size)); |
| 692 } | 688 } |
| 693 } | 689 } |
| 694 | 690 |
| 695 if (!range.Equals(defn->range())) { | 691 if (!range.Equals(defn->range())) { |
| 696 #ifndef PRODUCT | 692 #ifndef PRODUCT |
| 697 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 693 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 698 THR_Print("%c [%" Pd "] %s: %s => %s\n", | 694 THR_Print("%c [%" Pd "] %s: %s => %s\n", OpPrefix(op), iteration, |
| 699 OpPrefix(op), | 695 defn->ToCString(), Range::ToCString(defn->range()), |
| 700 iteration, | |
| 701 defn->ToCString(), | |
| 702 Range::ToCString(defn->range()), | |
| 703 Range::ToCString(&range)); | 696 Range::ToCString(&range)); |
| 704 } | 697 } |
| 705 #endif // !PRODUCT | 698 #endif // !PRODUCT |
| 706 defn->set_range(range); | 699 defn->set_range(range); |
| 707 return true; | 700 return true; |
| 708 } | 701 } |
| 709 } | 702 } |
| 710 | 703 |
| 711 return false; | 704 return false; |
| 712 } | 705 } |
| 713 | 706 |
| 714 | 707 |
| 715 void RangeAnalysis::CollectDefinitions(BitVector* set) { | 708 void RangeAnalysis::CollectDefinitions(BitVector* set) { |
| 716 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); | 709 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); |
| 717 !block_it.Done(); | 710 !block_it.Done(); block_it.Advance()) { |
| 718 block_it.Advance()) { | |
| 719 BlockEntryInstr* block = block_it.Current(); | 711 BlockEntryInstr* block = block_it.Current(); |
| 720 | 712 |
| 721 JoinEntryInstr* join = block->AsJoinEntry(); | 713 JoinEntryInstr* join = block->AsJoinEntry(); |
| 722 if (join != NULL) { | 714 if (join != NULL) { |
| 723 for (PhiIterator it(join); !it.Done(); it.Advance()) { | 715 for (PhiIterator it(join); !it.Done(); it.Advance()) { |
| 724 PhiInstr* phi = it.Current(); | 716 PhiInstr* phi = it.Current(); |
| 725 if (set->Contains(phi->ssa_temp_index())) { | 717 if (set->Contains(phi->ssa_temp_index())) { |
| 726 definitions_.Add(phi); | 718 definitions_.Add(phi); |
| 727 } | 719 } |
| 728 } | 720 } |
| 729 } | 721 } |
| 730 | 722 |
| 731 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { | 723 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { |
| 732 Definition* defn = it.Current()->AsDefinition(); | 724 Definition* defn = it.Current()->AsDefinition(); |
| 733 if ((defn != NULL) && | 725 if ((defn != NULL) && defn->HasSSATemp() && |
| 734 defn->HasSSATemp() && | |
| 735 set->Contains(defn->ssa_temp_index())) { | 726 set->Contains(defn->ssa_temp_index())) { |
| 736 definitions_.Add(defn); | 727 definitions_.Add(defn); |
| 737 } | 728 } |
| 738 } | 729 } |
| 739 } | 730 } |
| 740 } | 731 } |
| 741 | 732 |
| 742 | 733 |
| 743 void RangeAnalysis::Iterate(JoinOperator op, intptr_t max_iterations) { | 734 void RangeAnalysis::Iterate(JoinOperator op, intptr_t max_iterations) { |
| 744 // TODO(vegorov): switch to worklist if this becomes performance bottleneck. | 735 // TODO(vegorov): switch to worklist if this becomes performance bottleneck. |
| (...skipping 12 matching lines...) |
| 757 } while (changed && (iteration < max_iterations)); | 748 } while (changed && (iteration < max_iterations)); |
| 758 } | 749 } |
| 759 | 750 |
| 760 | 751 |
| 761 void RangeAnalysis::InferRanges() { | 752 void RangeAnalysis::InferRanges() { |
| 762 if (FLAG_trace_range_analysis) { | 753 if (FLAG_trace_range_analysis) { |
| 763 FlowGraphPrinter::PrintGraph("Range Analysis (BEFORE)", flow_graph_); | 754 FlowGraphPrinter::PrintGraph("Range Analysis (BEFORE)", flow_graph_); |
| 764 } | 755 } |
| 765 Zone* zone = flow_graph_->zone(); | 756 Zone* zone = flow_graph_->zone(); |
| 766 // Initialize bitvector for quick filtering of int values. | 757 // Initialize bitvector for quick filtering of int values. |
| 767 BitVector* set = new(zone) BitVector(zone, | 758 BitVector* set = |
| 768 flow_graph_->current_ssa_temp_index()); | 759 new (zone) BitVector(zone, flow_graph_->current_ssa_temp_index()); |
| 769 for (intptr_t i = 0; i < values_.length(); i++) { | 760 for (intptr_t i = 0; i < values_.length(); i++) { |
| 770 set->Add(values_[i]->ssa_temp_index()); | 761 set->Add(values_[i]->ssa_temp_index()); |
| 771 } | 762 } |
| 772 for (intptr_t i = 0; i < constraints_.length(); i++) { | 763 for (intptr_t i = 0; i < constraints_.length(); i++) { |
| 773 set->Add(constraints_[i]->ssa_temp_index()); | 764 set->Add(constraints_[i]->ssa_temp_index()); |
| 774 } | 765 } |
| 775 | 766 |
| 776 // Collect integer definitions (including constraints) in the reverse | 767 // Collect integer definitions (including constraints) in the reverse |
| 777 // postorder. This improves convergence speed compared to iterating | 768 // postorder. This improves convergence speed compared to iterating |
| 778 // values_ and constraints_ array separately. | 769 // values_ and constraints_ array separately. |
| (...skipping 71 matching lines...) |
| 850 explicit Scheduler(FlowGraph* flow_graph) | 841 explicit Scheduler(FlowGraph* flow_graph) |
| 851 : flow_graph_(flow_graph), | 842 : flow_graph_(flow_graph), |
| 852 loop_headers_(flow_graph->LoopHeaders()), | 843 loop_headers_(flow_graph->LoopHeaders()), |
| 853 pre_headers_(loop_headers_.length()) { | 844 pre_headers_(loop_headers_.length()) { |
| 854 for (intptr_t i = 0; i < loop_headers_.length(); i++) { | 845 for (intptr_t i = 0; i < loop_headers_.length(); i++) { |
| 855 pre_headers_.Add(loop_headers_[i]->ImmediateDominator()); | 846 pre_headers_.Add(loop_headers_[i]->ImmediateDominator()); |
| 856 } | 847 } |
| 857 } | 848 } |
| 858 | 849 |
| 859 // Clear the list of emitted instructions. | 850 // Clear the list of emitted instructions. |
| 860 void Start() { | 851 void Start() { emitted_.Clear(); } |
| 861 emitted_.Clear(); | |
| 862 } | |
| 863 | 852 |
| 864 // Given the floating instruction, attempt to schedule it into one of the | 853 // Given the floating instruction, attempt to schedule it into one of the |
| 865 // loop preheaders that dominates the given post_dominator instruction. | 854 // loop preheaders that dominates the given post_dominator instruction. |
| 866 // Some of the instruction inputs can potentially be unscheduled as well. | 855 // Some of the instruction inputs can potentially be unscheduled as well. |
| 867 // Returns NULL if the scheduling fails (e.g. inputs are not invariant for | 856 // Returns NULL if the scheduling fails (e.g. inputs are not invariant for |
| 868 // any loop containing post_dominator). | 857 // any loop containing post_dominator). |
| 869 // The resulting schedule should be equivalent to one obtained by inserting | 858 // The resulting schedule should be equivalent to one obtained by inserting |
| 870 // instructions right before post_dominator and running CSE and LICM passes. | 859 // instructions right before post_dominator and running CSE and LICM passes. |
| 871 template<typename T> | 860 template <typename T> |
| 872 T* Emit(T* instruction, Instruction* post_dominator) { | 861 T* Emit(T* instruction, Instruction* post_dominator) { |
| 873 return static_cast<T*>(EmitRecursively(instruction, post_dominator)); | 862 return static_cast<T*>(EmitRecursively(instruction, post_dominator)); |
| 874 } | 863 } |
| 875 | 864 |
| 876 // Undo all insertions recorded in the list of emitted instructions. | 865 // Undo all insertions recorded in the list of emitted instructions. |
| 877 void Rollback() { | 866 void Rollback() { |
| 878 for (intptr_t i = emitted_.length() - 1; i >= 0; i--) { | 867 for (intptr_t i = emitted_.length() - 1; i >= 0; i--) { |
| 879 emitted_[i]->RemoveFromGraph(); | 868 emitted_[i]->RemoveFromGraph(); |
| 880 } | 869 } |
| 881 emitted_.Clear(); | 870 emitted_.Clear(); |
| 882 } | 871 } |
| 883 | 872 |
| 884 private: | 873 private: |
| 885 typedef DirectChainedHashMap<PointerKeyValueTrait<Instruction> > Map; | 874 typedef DirectChainedHashMap<PointerKeyValueTrait<Instruction> > Map; |
| 886 | 875 |
| 887 Instruction* EmitRecursively(Instruction* instruction, | 876 Instruction* EmitRecursively(Instruction* instruction, Instruction* sink) { |
| 888 Instruction* sink) { | |
| 889 // Schedule all unscheduled inputs and unwrap all constrained inputs. | 877 // Schedule all unscheduled inputs and unwrap all constrained inputs. |
| 890 for (intptr_t i = 0; i < instruction->InputCount(); i++) { | 878 for (intptr_t i = 0; i < instruction->InputCount(); i++) { |
| 891 Definition* defn = instruction->InputAt(i)->definition(); | 879 Definition* defn = instruction->InputAt(i)->definition(); |
| 892 | 880 |
| 893 // Instruction is not in the graph yet which means that none of | 881 // Instruction is not in the graph yet which means that none of |
| 894 // its input uses should be recorded at defn's use chains. | 882 // its input uses should be recorded at defn's use chains. |
| 895 // Verify this assumption to ensure that we are not going to | 883 // Verify this assumption to ensure that we are not going to |
| 896 // leave use-lists in an inconsistent state when we start | 884 // leave use-lists in an inconsistent state when we start |
| 897 // rewriting inputs via set_definition. | 885 // rewriting inputs via set_definition. |
| 898 ASSERT(instruction->InputAt(i)->IsSingleUse() && | 886 ASSERT(instruction->InputAt(i)->IsSingleUse() && |
| 899 !defn->HasOnlyInputUse(instruction->InputAt(i))); | 887 !defn->HasOnlyInputUse(instruction->InputAt(i))); |
| 900 | 888 |
| 901 if (!defn->HasSSATemp()) { | 889 if (!defn->HasSSATemp()) { |
| 902 Definition* scheduled = Emit(defn, sink); | 890 Definition* scheduled = Emit(defn, sink); |
| 903 if (scheduled == NULL) { | 891 if (scheduled == NULL) { |
| 904 return NULL; | 892 return NULL; |
| 905 } | 893 } |
| 906 instruction->InputAt(i)->set_definition(scheduled); | 894 instruction->InputAt(i)->set_definition(scheduled); |
| 907 } else if (defn->IsConstraint()) { | 895 } else if (defn->IsConstraint()) { |
| 908 instruction->InputAt(i)->set_definition(UnwrapConstraint(defn)); | 896 instruction->InputAt(i)->set_definition(UnwrapConstraint(defn)); |
| 909 } | 897 } |
| 910 } | 898 } |
| 911 | 899 |
| 912 // Attempt to find equivalent instruction that was already scheduled. | 900 // Attempt to find equivalent instruction that was already scheduled. |
| 913 // If the instruction is still in the graph (it could have been | 901 // If the instruction is still in the graph (it could have been |
| 914 // un-scheduled by a rollback action) and it dominates the sink - use it. | 902 // un-scheduled by a rollback action) and it dominates the sink - use it. |
| 915 Instruction* emitted = map_.LookupValue(instruction); | 903 Instruction* emitted = map_.LookupValue(instruction); |
| 916 if (emitted != NULL && | 904 if (emitted != NULL && !emitted->WasEliminated() && |
| 917 !emitted->WasEliminated() && | |
| 918 sink->IsDominatedBy(emitted)) { | 905 sink->IsDominatedBy(emitted)) { |
| 919 return emitted; | 906 return emitted; |
| 920 } | 907 } |
| 921 | 908 |
| 922 // Attempt to find suitable pre-header. Iterate loop headers backwards to | 909 // Attempt to find suitable pre-header. Iterate loop headers backwards to |
| 923 // attempt scheduling into the outermost loop first. | 910 // attempt scheduling into the outermost loop first. |
| 924 for (intptr_t i = loop_headers_.length() - 1; i >= 0; i--) { | 911 for (intptr_t i = loop_headers_.length() - 1; i >= 0; i--) { |
| 925 BlockEntryInstr* header = loop_headers_[i]; | 912 BlockEntryInstr* header = loop_headers_[i]; |
| 926 BlockEntryInstr* pre_header = pre_headers_[i]; | 913 BlockEntryInstr* pre_header = pre_headers_[i]; |
| 927 | 914 |
| (...skipping 20 matching lines...) |
| 948 EmitTo(pre_header, instruction); | 935 EmitTo(pre_header, instruction); |
| 949 return instruction; | 936 return instruction; |
| 950 } | 937 } |
| 951 } | 938 } |
| 952 | 939 |
| 953 return NULL; | 940 return NULL; |
| 954 } | 941 } |
| 955 | 942 |
| 956 void EmitTo(BlockEntryInstr* block, Instruction* instr) { | 943 void EmitTo(BlockEntryInstr* block, Instruction* instr) { |
| 957 GotoInstr* last = block->last_instruction()->AsGoto(); | 944 GotoInstr* last = block->last_instruction()->AsGoto(); |
| 958 flow_graph_->InsertBefore(last, | 945 flow_graph_->InsertBefore( |
| 959 instr, | 946 last, instr, last->env(), |
| 960 last->env(), | 947 instr->IsDefinition() ? FlowGraph::kValue : FlowGraph::kEffect); |
| 961 instr->IsDefinition() ? FlowGraph::kValue | |
| 962 : FlowGraph::kEffect); | |
| 963 instr->CopyDeoptIdFrom(*last); | 948 instr->CopyDeoptIdFrom(*last); |
| 964 instr->env()->set_deopt_id(instr->deopt_id_); | 949 instr->env()->set_deopt_id(instr->deopt_id_); |
| 965 | 950 |
| 966 map_.Insert(instr); | 951 map_.Insert(instr); |
| 967 emitted_.Add(instr); | 952 emitted_.Add(instr); |
| 968 } | 953 } |
| 969 | 954 |
| 970 FlowGraph* flow_graph_; | 955 FlowGraph* flow_graph_; |
| 971 Map map_; | 956 Map map_; |
| 972 const ZoneGrowableArray<BlockEntryInstr*>& loop_headers_; | 957 const ZoneGrowableArray<BlockEntryInstr*>& loop_headers_; |
| 973 GrowableArray<BlockEntryInstr*> pre_headers_; | 958 GrowableArray<BlockEntryInstr*> pre_headers_; |
| 974 GrowableArray<Instruction*> emitted_; | 959 GrowableArray<Instruction*> emitted_; |
| 975 }; | 960 }; |
| 976 | 961 |
| 977 | 962 |
| 978 // If the bounds check 0 <= index < length is not redundant, we attempt to | 963 // If the bounds check 0 <= index < length is not redundant, we attempt to |
| 979 // replace it with a sequence of checks that guarantee | 964 // replace it with a sequence of checks that guarantee |
| 980 // | 965 // |
| 981 // 0 <= LowerBound(index) < UpperBound(index) < length | 966 // 0 <= LowerBound(index) < UpperBound(index) < length |
| 982 // | 967 // |
| 983 // and hoist all of those checks out of the enclosing loop. | 968 // and hoist all of those checks out of the enclosing loop. |
| 984 // | 969 // |
| 985 // Upper/Lower bounds are symbolic arithmetic expressions with +, -, * | 970 // Upper/Lower bounds are symbolic arithmetic expressions with +, -, * |
| 986 // operations. | 971 // operations. |
| 987 class BoundsCheckGeneralizer { | 972 class BoundsCheckGeneralizer { |
| 988 public: | 973 public: |
| 989 BoundsCheckGeneralizer(RangeAnalysis* range_analysis, | 974 BoundsCheckGeneralizer(RangeAnalysis* range_analysis, FlowGraph* flow_graph) |
| 990 FlowGraph* flow_graph) | |
| 991 : range_analysis_(range_analysis), | 975 : range_analysis_(range_analysis), |
| 992 flow_graph_(flow_graph), | 976 flow_graph_(flow_graph), |
| 993 scheduler_(flow_graph) { } | 977 scheduler_(flow_graph) {} |
| 994 | 978 |
| 995 void TryGeneralize(CheckArrayBoundInstr* check, | 979 void TryGeneralize(CheckArrayBoundInstr* check, |
| 996 const RangeBoundary& array_length) { | 980 const RangeBoundary& array_length) { |
| 997 Definition* upper_bound = | 981 Definition* upper_bound = |
| 998 ConstructUpperBound(check->index()->definition(), check); | 982 ConstructUpperBound(check->index()->definition(), check); |
| 999 if (upper_bound == UnwrapConstraint(check->index()->definition())) { | 983 if (upper_bound == UnwrapConstraint(check->index()->definition())) { |
| 1000 // Unable to construct upper bound for the index. | 984 // Unable to construct upper bound for the index. |
| 1001 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 985 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 1002 THR_Print("Failed to construct upper bound for %s index\n", | 986 THR_Print("Failed to construct upper bound for %s index\n", |
| 1003 check->ToCString()); | 987 check->ToCString()); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1019 range_analysis_->AssignRangesRecursively(upper_bound); | 1003 range_analysis_->AssignRangesRecursively(upper_bound); |
| 1020 | 1004 |
| 1021 // We are going to constrain any symbols participating in + and * operations | 1005 // We are going to constrain any symbols participating in + and * operations |
| 1022 // to guarantee that they are positive. Find all symbols that need | 1006 // to guarantee that they are positive. Find all symbols that need |
| 1023 // constraining. If there is a subtraction subexpression with non-positive | 1007 // constraining. If there is a subtraction subexpression with non-positive |
| 1024 // range give up on generalization for simplicity. | 1008 // range give up on generalization for simplicity. |
| 1025 GrowableArray<Definition*> non_positive_symbols; | 1009 GrowableArray<Definition*> non_positive_symbols; |
| 1026 if (!FindNonPositiveSymbols(&non_positive_symbols, upper_bound)) { | 1010 if (!FindNonPositiveSymbols(&non_positive_symbols, upper_bound)) { |
| 1027 #ifndef PRODUCT | 1011 #ifndef PRODUCT |
| 1028 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 1012 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 1029 THR_Print("Failed to generalize %s index to %s" | 1013 THR_Print( |
| 1030 " (can't ensure positivity)\n", | 1014 "Failed to generalize %s index to %s" |
| 1031 check->ToCString(), | 1015 " (can't ensure positivity)\n", |
| 1032 IndexBoundToCString(upper_bound)); | 1016 check->ToCString(), IndexBoundToCString(upper_bound)); |
| 1033 } | 1017 } |
| 1034 #endif // !PRODUCT | 1018 #endif // !PRODUCT |
| 1035 return; | 1019 return; |
| 1036 } | 1020 } |
| 1037 | 1021 |
| 1038 // Check that we can statically prove that lower bound of the index is | 1022 // Check that we can statically prove that lower bound of the index is |
| 1039 // non-negative under the assumption that all potentially non-positive | 1023 // non-negative under the assumption that all potentially non-positive |
| 1040 // symbols are positive. | 1024 // symbols are positive. |
| 1041 GrowableArray<ConstraintInstr*> positive_constraints( | 1025 GrowableArray<ConstraintInstr*> positive_constraints( |
| 1042 non_positive_symbols.length()); | 1026 non_positive_symbols.length()); |
| 1043 Range* positive_range = new Range( | 1027 Range* positive_range = |
| 1044 RangeBoundary::FromConstant(0), | 1028 new Range(RangeBoundary::FromConstant(0), |
| 1045 RangeBoundary::MaxConstant(RangeBoundary::kRangeBoundarySmi)); | 1029 RangeBoundary::MaxConstant(RangeBoundary::kRangeBoundarySmi)); |
| 1046 for (intptr_t i = 0; i < non_positive_symbols.length(); i++) { | 1030 for (intptr_t i = 0; i < non_positive_symbols.length(); i++) { |
| 1047 Definition* symbol = non_positive_symbols[i]; | 1031 Definition* symbol = non_positive_symbols[i]; |
| 1048 positive_constraints.Add(new ConstraintInstr( | 1032 positive_constraints.Add( |
| 1049 new Value(symbol), | 1033 new ConstraintInstr(new Value(symbol), positive_range)); |
| 1050 positive_range)); | |
| 1051 } | 1034 } |
| 1052 | 1035 |
| 1053 Definition* lower_bound = | 1036 Definition* lower_bound = |
| 1054 ConstructLowerBound(check->index()->definition(), check); | 1037 ConstructLowerBound(check->index()->definition(), check); |
| 1055 // No need to simplify lower bound before applying constraints as | 1038 // No need to simplify lower bound before applying constraints as |
| 1056 // we are not going to emit it. | 1039 // we are not going to emit it. |
| 1057 lower_bound = ApplyConstraints(lower_bound, check, &positive_constraints); | 1040 lower_bound = ApplyConstraints(lower_bound, check, &positive_constraints); |
| 1058 range_analysis_->AssignRangesRecursively(lower_bound); | 1041 range_analysis_->AssignRangesRecursively(lower_bound); |
| 1059 | 1042 |
| 1060 if (!RangeUtils::IsPositive(lower_bound->range())) { | 1043 if (!RangeUtils::IsPositive(lower_bound->range())) { |
| 1061 // Can't prove that lower bound is positive even with additional checks | 1044 // Can't prove that lower bound is positive even with additional checks |
| 1062 // against potentially non-positive symbols. Give up. | 1045 // against potentially non-positive symbols. Give up. |
| 1063 #ifndef PRODUCT | 1046 #ifndef PRODUCT |
| 1064 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 1047 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 1065 THR_Print("Failed to generalize %s index to %s" | 1048 THR_Print( |
| 1066 " (lower bound is not positive)\n", | 1049 "Failed to generalize %s index to %s" |
| 1067 check->ToCString(), | 1050 " (lower bound is not positive)\n", |
| 1068 IndexBoundToCString(upper_bound)); | 1051 check->ToCString(), IndexBoundToCString(upper_bound)); |
| 1069 } | 1052 } |
| 1070 #endif // !PRODUCT | 1053 #endif // !PRODUCT |
| 1071 return; | 1054 return; |
| 1072 } | 1055 } |
| 1073 | 1056 |
| 1074 #ifndef PRODUCT | 1057 #ifndef PRODUCT |
| 1075 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { | 1058 if (FLAG_support_il_printer && FLAG_trace_range_analysis) { |
| 1076 THR_Print("For %s computed index bounds [%s, %s]\n", | 1059 THR_Print("For %s computed index bounds [%s, %s]\n", check->ToCString(), |
| 1077 check->ToCString(), | |
| 1078 IndexBoundToCString(lower_bound), | 1060 IndexBoundToCString(lower_bound), |
| 1079 IndexBoundToCString(upper_bound)); | 1061 IndexBoundToCString(upper_bound)); |
| 1080 } | 1062 } |
| 1081 #endif // !PRODUCT | 1063 #endif // !PRODUCT |
| 1082 | 1064 |
| 1083 // At this point we know that 0 <= index < UpperBound(index) under | 1065 // At this point we know that 0 <= index < UpperBound(index) under |
| 1084 // certain preconditions. Start by emitting these preconditions. | 1066 // certain preconditions. Start by emitting these preconditions. |
| 1085 scheduler_.Start(); | 1067 scheduler_.Start(); |
| 1086 | 1068 |
| 1087 ConstantInstr* max_smi = | 1069 ConstantInstr* max_smi = |
| 1088 flow_graph_->GetConstant(Smi::Handle(Smi::New(Smi::kMaxValue))); | 1070 flow_graph_->GetConstant(Smi::Handle(Smi::New(Smi::kMaxValue))); |
| 1089 for (intptr_t i = 0; i < non_positive_symbols.length(); i++) { | 1071 for (intptr_t i = 0; i < non_positive_symbols.length(); i++) { |
| 1090 CheckArrayBoundInstr* precondition = new CheckArrayBoundInstr( | 1072 CheckArrayBoundInstr* precondition = new CheckArrayBoundInstr( |
| 1091 new Value(max_smi), | 1073 new Value(max_smi), new Value(non_positive_symbols[i]), |
| 1092 new Value(non_positive_symbols[i]), | |
| 1093 Thread::kNoDeoptId); | 1074 Thread::kNoDeoptId); |
| 1094 precondition->mark_generalized(); | 1075 precondition->mark_generalized(); |
| 1095 precondition = scheduler_.Emit(precondition, check); | 1076 precondition = scheduler_.Emit(precondition, check); |
| 1096 if (precondition == NULL) { | 1077 if (precondition == NULL) { |
| 1097 if (FLAG_trace_range_analysis) { | 1078 if (FLAG_trace_range_analysis) { |
| 1098 THR_Print(" => failed to insert positivity constraint\n"); | 1079 THR_Print(" => failed to insert positivity constraint\n"); |
| 1099 } | 1080 } |
| 1100 scheduler_.Rollback(); | 1081 scheduler_.Rollback(); |
| 1101 return; | 1082 return; |
| 1102 } | 1083 } |
| 1103 } | 1084 } |
| 1104 | 1085 |
| 1105 CheckArrayBoundInstr* new_check = new CheckArrayBoundInstr( | 1086 CheckArrayBoundInstr* new_check = new CheckArrayBoundInstr( |
| 1106 new Value(UnwrapConstraint(check->length()->definition())), | 1087 new Value(UnwrapConstraint(check->length()->definition())), |
| 1107 new Value(upper_bound), | 1088 new Value(upper_bound), Thread::kNoDeoptId); |
| 1108 Thread::kNoDeoptId); | |
| 1109 new_check->mark_generalized(); | 1089 new_check->mark_generalized(); |
| 1110 if (new_check->IsRedundant(array_length)) { | 1090 if (new_check->IsRedundant(array_length)) { |
| 1111 if (FLAG_trace_range_analysis) { | 1091 if (FLAG_trace_range_analysis) { |
| 1112 THR_Print(" => generalized check is redundant\n"); | 1092 THR_Print(" => generalized check is redundant\n"); |
| 1113 } | 1093 } |
| 1114 RemoveGeneralizedCheck(check); | 1094 RemoveGeneralizedCheck(check); |
| 1115 return; | 1095 return; |
| 1116 } | 1096 } |
| 1117 | 1097 |
| 1118 new_check = scheduler_.Emit(new_check, check); | 1098 new_check = scheduler_.Emit(new_check, check); |
| 1119 if (new_check != NULL) { | 1099 if (new_check != NULL) { |
| 1120 if (FLAG_trace_range_analysis) { | 1100 if (FLAG_trace_range_analysis) { |
| 1121 THR_Print(" => generalized check was hoisted into B%" Pd "\n", | 1101 THR_Print(" => generalized check was hoisted into B%" Pd "\n", |
| 1122 new_check->GetBlock()->block_id()); | 1102 new_check->GetBlock()->block_id()); |
| 1123 } | 1103 } |
| 1124 RemoveGeneralizedCheck(check); | 1104 RemoveGeneralizedCheck(check); |
| 1125 } else { | 1105 } else { |
| 1126 if (FLAG_trace_range_analysis) { | 1106 if (FLAG_trace_range_analysis) { |
| 1127 THR_Print(" => generalized check can't be hoisted\n"); | 1107 THR_Print(" => generalized check can't be hoisted\n"); |
| 1128 } | 1108 } |
| 1129 scheduler_.Rollback(); | 1109 scheduler_.Rollback(); |
| 1130 } | 1110 } |
| 1131 } | 1111 } |
| 1132 | 1112 |
| 1133 static void RemoveGeneralizedCheck(CheckArrayBoundInstr* check) { | 1113 static void RemoveGeneralizedCheck(CheckArrayBoundInstr* check) { |
| 1134 BinarySmiOpInstr* binary_op = | 1114 BinarySmiOpInstr* binary_op = check->index()->definition()->AsBinarySmiOp(); |
| 1135 check->index()->definition()->AsBinarySmiOp(); | |
| 1136 if (binary_op != NULL) { | 1115 if (binary_op != NULL) { |
| 1137 binary_op->set_can_overflow(false); | 1116 binary_op->set_can_overflow(false); |
| 1138 } | 1117 } |
| 1139 check->RemoveFromGraph(); | 1118 check->RemoveFromGraph(); |
| 1140 } | 1119 } |
| 1141 | 1120 |
| 1142 private: | 1121 private: |
| 1143 BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind, | 1122 BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind, |
| 1144 Definition* left, | 1123 Definition* left, |
| 1145 Definition* right) { | 1124 Definition* right) { |
| 1146 return new BinarySmiOpInstr(op_kind, | 1125 return new BinarySmiOpInstr(op_kind, new Value(left), new Value(right), |
| 1147 new Value(left), | |
| 1148 new Value(right), | |
| 1149 Thread::kNoDeoptId); | 1126 Thread::kNoDeoptId); |
| 1150 } | 1127 } |
| 1151 | 1128 |
| 1152 | 1129 |
| 1153 BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind, | 1130 BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind, |
| 1154 Definition* left, | 1131 Definition* left, |
| 1155 intptr_t right) { | 1132 intptr_t right) { |
| 1156 ConstantInstr* constant_right = | 1133 ConstantInstr* constant_right = |
| 1157 flow_graph_->GetConstant(Smi::Handle(Smi::New(right))); | 1134 flow_graph_->GetConstant(Smi::Handle(Smi::New(right))); |
| 1158 return MakeBinaryOp(op_kind, left, constant_right); | 1135 return MakeBinaryOp(op_kind, left, constant_right); |
| 1159 } | 1136 } |
| 1160 | 1137 |
| 1161 Definition* RangeBoundaryToDefinition(const RangeBoundary& bound) { | 1138 Definition* RangeBoundaryToDefinition(const RangeBoundary& bound) { |
| 1162 Definition* symbol = UnwrapConstraint(bound.symbol()); | 1139 Definition* symbol = UnwrapConstraint(bound.symbol()); |
| 1163 if (bound.offset() == 0) { | 1140 if (bound.offset() == 0) { |
| 1164 return symbol; | 1141 return symbol; |
| 1165 } else { | 1142 } else { |
| 1166 return MakeBinaryOp(Token::kADD, symbol, bound.offset()); | 1143 return MakeBinaryOp(Token::kADD, symbol, bound.offset()); |
| 1167 } | 1144 } |
| 1168 } | 1145 } |
| 1169 | 1146 |
| 1170 typedef Definition* (BoundsCheckGeneralizer::*PhiBoundFunc)( | 1147 typedef Definition* (BoundsCheckGeneralizer::*PhiBoundFunc)(PhiInstr*, |
| 1171 PhiInstr*, Instruction*); | 1148 Instruction*); |
| 1172 | 1149 |
| 1173 // Construct symbolic lower bound for a value at the given point. | 1150 // Construct symbolic lower bound for a value at the given point. |
| 1174 Definition* ConstructLowerBound(Definition* value, Instruction* point) { | 1151 Definition* ConstructLowerBound(Definition* value, Instruction* point) { |
| 1175 return ConstructBound(&BoundsCheckGeneralizer::InductionVariableLowerBound, | 1152 return ConstructBound(&BoundsCheckGeneralizer::InductionVariableLowerBound, |
| 1176 value, | 1153 value, point); |
| 1177 point); | |
| 1178 } | 1154 } |
| 1179 | 1155 |
| 1180 // Construct symbolic upper bound for a value at the given point. | 1156 // Construct symbolic upper bound for a value at the given point. |
| 1181 Definition* ConstructUpperBound(Definition* value, Instruction* point) { | 1157 Definition* ConstructUpperBound(Definition* value, Instruction* point) { |
| 1182 return ConstructBound(&BoundsCheckGeneralizer::InductionVariableUpperBound, | 1158 return ConstructBound(&BoundsCheckGeneralizer::InductionVariableUpperBound, |
| 1183 value, | 1159 value, point); |
| 1184 point); | |
| 1185 } | 1160 } |
| 1186 | 1161 |
| 1187 // Construct symbolic bound for a value at the given point: | 1162 // Construct symbolic bound for a value at the given point: |
| 1188 // | 1163 // |
| 1189 // 1. if value is an induction variable use its bounds; | 1164 // 1. if value is an induction variable use its bounds; |
| 1190 // 2. if value is addition or multiplication construct bounds for left | 1165 // 2. if value is addition or multiplication construct bounds for left |
| 1191 // and right hand sides separately and use addition/multiplication | 1166 // and right hand sides separately and use addition/multiplication |
| 1192 // of bounds as a bound (addition and multiplication are monotone | 1167 // of bounds as a bound (addition and multiplication are monotone |
| 1193 // operations for both operands); | 1168 // operations for both operands); |
| 1194 // 3. if value is a subtraction then construct bound for the left hand | 1169 |
| (...skipping 10 matching lines...) |
| 1205 if (phi->induction_variable_info() != NULL) { | 1180 if (phi->induction_variable_info() != NULL) { |
| 1206 return (this->*phi_bound_func)(phi, point); | 1181 return (this->*phi_bound_func)(phi, point); |
| 1207 } | 1182 } |
| 1208 } else if (value->IsBinarySmiOp()) { | 1183 } else if (value->IsBinarySmiOp()) { |
| 1209 BinarySmiOpInstr* bin_op = value->AsBinarySmiOp(); | 1184 BinarySmiOpInstr* bin_op = value->AsBinarySmiOp(); |
| 1210 if ((bin_op->op_kind() == Token::kADD) || | 1185 if ((bin_op->op_kind() == Token::kADD) || |
| 1211 (bin_op->op_kind() == Token::kMUL) || | 1186 (bin_op->op_kind() == Token::kMUL) || |
| 1212 (bin_op->op_kind() == Token::kSUB)) { | 1187 (bin_op->op_kind() == Token::kSUB)) { |
| 1213 Definition* new_left = | 1188 Definition* new_left = |
| 1214 ConstructBound(phi_bound_func, bin_op->left()->definition(), point); | 1189 ConstructBound(phi_bound_func, bin_op->left()->definition(), point); |
| 1215 Definition* new_right = (bin_op->op_kind() != Token::kSUB) | 1190 Definition* new_right = |
| 1216 ? ConstructBound(phi_bound_func, | 1191 (bin_op->op_kind() != Token::kSUB) |
| 1217 bin_op->right()->definition(), | 1192 ? ConstructBound(phi_bound_func, bin_op->right()->definition(), |
| 1218 point) | 1193 point) |
| 1219 : UnwrapConstraint(bin_op->right()->definition()); | 1194 : UnwrapConstraint(bin_op->right()->definition()); |
| 1220 | 1195 |
| 1221 if ((new_left != UnwrapConstraint(bin_op->left()->definition())) || | 1196 if ((new_left != UnwrapConstraint(bin_op->left()->definition())) || |
| 1222 (new_right != UnwrapConstraint(bin_op->right()->definition()))) { | 1197 (new_right != UnwrapConstraint(bin_op->right()->definition()))) { |
| 1223 return MakeBinaryOp(bin_op->op_kind(), new_left, new_right); | 1198 return MakeBinaryOp(bin_op->op_kind(), new_left, new_right); |
| 1224 } | 1199 } |
| 1225 } | 1200 } |
| 1226 } | 1201 } |
| 1227 | 1202 |
| 1228 return value; | 1203 return value; |
| 1229 } | 1204 } |
| 1230 | 1205 |
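The bound-construction rules listed above (an induction variable contributes the bound proven for it, addition combines the bounds of both operands, and subtraction only bounds its left operand) can be illustrated with a small standalone sketch. The Expr type, its field names, and the restriction of the subtrahend to a constant are simplifying assumptions made here for illustration; this is not the VM's IR, and multiplication is omitted because the real code relies on range information to keep it monotone.

#include <cstdint>

// Hypothetical, simplified expression node used only for this sketch.
struct Expr {
  enum Kind { kConstant, kInductionVar, kAdd, kSub } kind;
  int64_t value;        // payload for kConstant
  int64_t known_upper;  // for kInductionVar: bound proven by a dominating constraint
  const Expr* left;     // operands for kAdd / kSub
  const Expr* right;
};

// Upper-bound propagation mirroring rules 1-3 above (kSub assumes a constant right).
int64_t UpperBound(const Expr* e) {
  switch (e->kind) {
    case Expr::kConstant:     return e->value;
    case Expr::kInductionVar: return e->known_upper;                              // rule 1
    case Expr::kAdd:          return UpperBound(e->left) + UpperBound(e->right);  // rule 2
    case Expr::kSub:          return UpperBound(e->left) - e->right->value;       // rule 3
  }
  return INT64_MAX;  // unreachable for well-formed nodes
}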
| 1231 Definition* InductionVariableUpperBound(PhiInstr* phi, | 1206 Definition* InductionVariableUpperBound(PhiInstr* phi, Instruction* point) { |
| 1232 Instruction* point) { | |
| 1233 const InductionVariableInfo& info = *phi->induction_variable_info(); | 1207 const InductionVariableInfo& info = *phi->induction_variable_info(); |
| 1234 if (info.bound() == phi) { | 1208 if (info.bound() == phi) { |
| 1235 if (point->IsDominatedBy(info.limit())) { | 1209 if (point->IsDominatedBy(info.limit())) { |
| 1236 // Given induction variable | 1210 // Given induction variable |
| 1237 // | 1211 // |
| 1238 // x <- phi(x0, x + 1) | 1212 // x <- phi(x0, x + 1) |
| 1239 // | 1213 // |
| 1240 // and a constraint x <= M that dominates the given | 1214 // and a constraint x <= M that dominates the given |
| 1241 // point we conclude that M is an upper bound for x. | 1215 // point we conclude that M is an upper bound for x. |
| 1242 return RangeBoundaryToDefinition(info.limit()->constraint()->max()); | 1216 return RangeBoundaryToDefinition(info.limit()->constraint()->max()); |
| 1243 } | 1217 } |
| 1244 } else { | 1218 } else { |
| 1245 const InductionVariableInfo& bound_info = | 1219 const InductionVariableInfo& bound_info = |
| 1246 *info.bound()->induction_variable_info(); | 1220 *info.bound()->induction_variable_info(); |
| 1247 if (point->IsDominatedBy(bound_info.limit())) { | 1221 if (point->IsDominatedBy(bound_info.limit())) { |
| 1248 // Given two induction variables | 1222 // Given two induction variables |
| 1249 // | 1223 // |
| 1250 // x <- phi(x0, x + 1) | 1224 // x <- phi(x0, x + 1) |
| 1251 // y <- phi(y0, y + 1) | 1225 // y <- phi(y0, y + 1) |
| 1252 // | 1226 // |
| 1253 // and a constraint x <= M that dominates the given | 1227 // and a constraint x <= M that dominates the given |
| 1254 // point we can conclude that | 1228 // point we can conclude that |
| 1255 // | 1229 // |
| 1256 // y <= y0 + (M - x0) | 1230 // y <= y0 + (M - x0) |
| 1257 // | 1231 // |
| 1258 Definition* limit = RangeBoundaryToDefinition( | 1232 Definition* limit = |
| 1259 bound_info.limit()->constraint()->max()); | 1233 RangeBoundaryToDefinition(bound_info.limit()->constraint()->max()); |
| 1260 BinarySmiOpInstr* loop_length = | 1234 BinarySmiOpInstr* loop_length = MakeBinaryOp( |
| 1261 MakeBinaryOp(Token::kSUB, | 1235 Token::kSUB, ConstructUpperBound(limit, point), |
| 1262 ConstructUpperBound(limit, point), | 1236 ConstructLowerBound(bound_info.initial_value(), point)); |
| 1263 ConstructLowerBound(bound_info.initial_value(), | |
| 1264 point)); | |
| 1265 return MakeBinaryOp(Token::kADD, | 1237 return MakeBinaryOp(Token::kADD, |
| 1266 ConstructUpperBound(info.initial_value(), point), | 1238 ConstructUpperBound(info.initial_value(), point), |
| 1267 loop_length); | 1239 loop_length); |
| 1268 } | 1240 } |
| 1269 } | 1241 } |
| 1270 | 1242 |
| 1271 return phi; | 1243 return phi; |
| 1272 } | 1244 } |
| 1273 | 1245 |
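The inequality y <= y0 + (M - x0) derived in the comments above can be sanity-checked with a tiny standalone program. The concrete values of x0, y0 and M are arbitrary and only illustrate two counters that advance in lock step inside a region dominated by the constraint x <= M.

#include <cassert>
#include <cstdint>

int main() {
  const int64_t x0 = 3, y0 = 10, M = 20;  // arbitrary example values
  int64_t x = x0, y = y0;
  while (x <= M) {                // region dominated by the constraint x <= M
    assert(y <= y0 + (M - x0));   // the derived upper bound for y holds here
    ++x;                          // both counters are incremented in lock step,
    ++y;                          // like the two phis in the comment above
  }
  return 0;
}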
| 1274 Definition* InductionVariableLowerBound(PhiInstr* phi, | 1246 Definition* InductionVariableLowerBound(PhiInstr* phi, Instruction* point) { |
| 1275 Instruction* point) { | |
| 1276 // Given induction variable | 1247 // Given induction variable |
| 1277 // | 1248 // |
| 1278 // x <- phi(x0, x + 1) | 1249 // x <- phi(x0, x + 1) |
| 1279 // | 1250 // |
| 1280 // we can conclude that LowerBound(x) == x0. | 1251 // we can conclude that LowerBound(x) == x0. |
| 1281 const InductionVariableInfo& info = *phi->induction_variable_info(); | 1252 const InductionVariableInfo& info = *phi->induction_variable_info(); |
| 1282 return ConstructLowerBound(info.initial_value(), point); | 1253 return ConstructLowerBound(info.initial_value(), point); |
| 1283 } | 1254 } |
| 1284 | 1255 |
| 1285 // Try to re-associate binary operations in the floating DAG of operations | 1256 // Try to re-associate binary operations in the floating DAG of operations |
| (...skipping 103 matching lines...) |
| 1389 ASSERT(right != NULL); | 1360 ASSERT(right != NULL); |
| 1390 | 1361 |
| 1391 const bool left_changed = (left != binary_op->left()->definition()); | 1362 const bool left_changed = (left != binary_op->left()->definition()); |
| 1392 const bool right_changed = (right != binary_op->right()->definition()); | 1363 const bool right_changed = (right != binary_op->right()->definition()); |
| 1393 if (left_changed || right_changed) { | 1364 if (left_changed || right_changed) { |
| 1394 if (!(*defn)->HasSSATemp()) { | 1365 if (!(*defn)->HasSSATemp()) { |
| 1395 if (left_changed) binary_op->left()->set_definition(left); | 1366 if (left_changed) binary_op->left()->set_definition(left); |
| 1396 if (right_changed) binary_op->right()->set_definition(right); | 1367 if (right_changed) binary_op->right()->set_definition(right); |
| 1397 *defn = binary_op; | 1368 *defn = binary_op; |
| 1398 } else { | 1369 } else { |
| 1399 *defn = MakeBinaryOp(binary_op->op_kind(), | 1370 *defn = MakeBinaryOp(binary_op->op_kind(), UnwrapConstraint(left), |
| 1400 UnwrapConstraint(left), | |
| 1401 UnwrapConstraint(right)); | 1371 UnwrapConstraint(right)); |
| 1402 } | 1372 } |
| 1403 } | 1373 } |
| 1404 | 1374 |
| 1405 if ((c != 0) && (constant == NULL)) { | 1375 if ((c != 0) && (constant == NULL)) { |
| 1406 *defn = MakeBinaryOp(Token::kADD, *defn, c); | 1376 *defn = MakeBinaryOp(Token::kADD, *defn, c); |
| 1407 } | 1377 } |
| 1408 } else if ((*defn)->IsConstant()) { | 1378 } else if ((*defn)->IsConstant()) { |
| 1409 ConstantInstr* constant_defn = (*defn)->AsConstant(); | 1379 ConstantInstr* constant_defn = (*defn)->AsConstant(); |
| 1410 if ((constant != NULL) && constant_defn->value().IsSmi()) { | 1380 if ((constant != NULL) && constant_defn->value().IsSmi()) { |
| (...skipping 30 matching lines...) |
| 1441 } | 1411 } |
| 1442 | 1412 |
| 1443 if (binary_op->op_kind() == Token::kSUB) { | 1413 if (binary_op->op_kind() == Token::kSUB) { |
| 1444 // For addition and multiplication it's enough to ensure that | 1414 // For addition and multiplication it's enough to ensure that |
| 1445 // lhs and rhs are positive to guarantee that defn as a whole is | 1415 // lhs and rhs are positive to guarantee that defn as a whole is |
| 1446 // positive. This does not work for subtraction, so just give up. | 1416 // positive. This does not work for subtraction, so just give up. |
| 1447 return false; | 1417 return false; |
| 1448 } | 1418 } |
| 1449 | 1419 |
| 1450 return FindNonPositiveSymbols(symbols, binary_op->left()->definition()) && | 1420 return FindNonPositiveSymbols(symbols, binary_op->left()->definition()) && |
| 1451 FindNonPositiveSymbols(symbols, binary_op->right()->definition()); | 1421 FindNonPositiveSymbols(symbols, binary_op->right()->definition()); |
| 1452 } | 1422 } |
| 1453 UNREACHABLE(); | 1423 UNREACHABLE(); |
| 1454 return false; | 1424 return false; |
| 1455 } | 1425 } |
| 1456 | 1426 |
| 1457 // Find the innermost constraint for the given definition that dominates | 1427 // Find the innermost constraint for the given definition that dominates |
| 1458 // the given instruction. | 1428 // the given instruction. |
| 1459 static Definition* FindInnermostConstraint(Definition* defn, | 1429 static Definition* FindInnermostConstraint(Definition* defn, |
| 1460 Instruction* post_dominator) { | 1430 Instruction* post_dominator) { |
| 1461 for (Value* use = defn->input_use_list(); | 1431 for (Value* use = defn->input_use_list(); use != NULL; |
| 1462 use != NULL; | |
| 1463 use = use->next_use()) { | 1432 use = use->next_use()) { |
| 1464 ConstraintInstr* constraint = use->instruction()->AsConstraint(); | 1433 ConstraintInstr* constraint = use->instruction()->AsConstraint(); |
| 1465 if ((constraint != NULL) && post_dominator->IsDominatedBy(constraint)) { | 1434 if ((constraint != NULL) && post_dominator->IsDominatedBy(constraint)) { |
| 1466 return FindInnermostConstraint(constraint, post_dominator); | 1435 return FindInnermostConstraint(constraint, post_dominator); |
| 1467 } | 1436 } |
| 1468 } | 1437 } |
| 1469 return defn; | 1438 return defn; |
| 1470 } | 1439 } |
| 1471 | 1440 |
| 1472 // Replace symbolic parts of the boundary with respective constraints | 1441 // Replace symbolic parts of the boundary with respective constraints |
| (...skipping 12 matching lines...) |
| 1485 ConstraintInstr* constraint = (*constraints)[i]; | 1454 ConstraintInstr* constraint = (*constraints)[i]; |
| 1486 if (constraint->value()->definition() == defn) { | 1455 if (constraint->value()->definition() == defn) { |
| 1487 return constraint; | 1456 return constraint; |
| 1488 } | 1457 } |
| 1489 } | 1458 } |
| 1490 } | 1459 } |
| 1491 return defn; | 1460 return defn; |
| 1492 } | 1461 } |
| 1493 | 1462 |
| 1494 for (intptr_t i = 0; i < defn->InputCount(); i++) { | 1463 for (intptr_t i = 0; i < defn->InputCount(); i++) { |
| 1495 defn->InputAt(i)->set_definition( | 1464 defn->InputAt(i)->set_definition(ApplyConstraints( |
| 1496 ApplyConstraints(defn->InputAt(i)->definition(), | 1465 defn->InputAt(i)->definition(), post_dominator, constraints)); |
| 1497 post_dominator, | |
| 1498 constraints)); | |
| 1499 } | 1466 } |
| 1500 | 1467 |
| 1501 return defn; | 1468 return defn; |
| 1502 } | 1469 } |
| 1503 | 1470 |
| 1504 #ifndef PRODUCT | 1471 #ifndef PRODUCT |
| 1505 static void PrettyPrintIndexBoundRecursively(BufferFormatter* f, | 1472 static void PrettyPrintIndexBoundRecursively(BufferFormatter* f, |
| 1506 Definition* index_bound) { | 1473 Definition* index_bound) { |
| 1507 BinarySmiOpInstr* binary_op = index_bound->AsBinarySmiOp(); | 1474 BinarySmiOpInstr* binary_op = index_bound->AsBinarySmiOp(); |
| 1508 if (binary_op != NULL) { | 1475 if (binary_op != NULL) { |
| (...skipping 26 matching lines...) |
| 1535 }; | 1502 }; |
| 1536 | 1503 |
| 1537 | 1504 |
| 1538 void RangeAnalysis::EliminateRedundantBoundsChecks() { | 1505 void RangeAnalysis::EliminateRedundantBoundsChecks() { |
| 1539 if (FLAG_array_bounds_check_elimination) { | 1506 if (FLAG_array_bounds_check_elimination) { |
| 1540 const Function& function = flow_graph_->function(); | 1507 const Function& function = flow_graph_->function(); |
| 1541 // Attempt generalization only if we have not deoptimized on a generalized | 1508 // Attempt generalization only if we have not deoptimized on a generalized |
| 1542 // check earlier and we are not compiling precompiled code (where no | 1509 // check earlier and we are not compiling precompiled code (where no |
| 1543 // optimistic hoisting of checks is possible). | 1510 // optimistic hoisting of checks is possible). |
| 1544 const bool try_generalization = | 1511 const bool try_generalization = |
| 1545 function.allows_bounds_check_generalization() && | 1512 function.allows_bounds_check_generalization() && !FLAG_precompiled_mode; |
| 1546 !FLAG_precompiled_mode; | |
| 1547 | 1513 |
| 1548 BoundsCheckGeneralizer generalizer(this, flow_graph_); | 1514 BoundsCheckGeneralizer generalizer(this, flow_graph_); |
| 1549 | 1515 |
| 1550 for (intptr_t i = 0; i < bounds_checks_.length(); i++) { | 1516 for (intptr_t i = 0; i < bounds_checks_.length(); i++) { |
| 1551 CheckArrayBoundInstr* check = bounds_checks_[i]; | 1517 CheckArrayBoundInstr* check = bounds_checks_[i]; |
| 1552 RangeBoundary array_length = | 1518 RangeBoundary array_length = |
| 1553 RangeBoundary::FromDefinition(check->length()->definition()); | 1519 RangeBoundary::FromDefinition(check->length()->definition()); |
| 1554 if (check->IsRedundant(array_length)) { | 1520 if (check->IsRedundant(array_length)) { |
| 1555 check->RemoveFromGraph(); | 1521 check->RemoveFromGraph(); |
| 1556 } else if (try_generalization) { | 1522 } else if (try_generalization) { |
| (...skipping 53 matching lines...) |
| 1610 } | 1576 } |
| 1611 constraints_[i]->ReplaceUsesWith(def); | 1577 constraints_[i]->ReplaceUsesWith(def); |
| 1612 constraints_[i]->RemoveFromGraph(); | 1578 constraints_[i]->RemoveFromGraph(); |
| 1613 } | 1579 } |
| 1614 } | 1580 } |
| 1615 | 1581 |
| 1616 | 1582 |
| 1617 static void NarrowBinaryMintOp(BinaryMintOpInstr* mint_op) { | 1583 static void NarrowBinaryMintOp(BinaryMintOpInstr* mint_op) { |
| 1618 if (RangeUtils::Fits(mint_op->range(), RangeBoundary::kRangeBoundaryInt32) && | 1584 if (RangeUtils::Fits(mint_op->range(), RangeBoundary::kRangeBoundaryInt32) && |
| 1619 RangeUtils::Fits(mint_op->left()->definition()->range(), | 1585 RangeUtils::Fits(mint_op->left()->definition()->range(), |
| 1620 RangeBoundary::kRangeBoundaryInt32) && | 1586 RangeBoundary::kRangeBoundaryInt32) && |
| 1621 RangeUtils::Fits(mint_op->right()->definition()->range(), | 1587 RangeUtils::Fits(mint_op->right()->definition()->range(), |
| 1622 RangeBoundary::kRangeBoundaryInt32) && | 1588 RangeBoundary::kRangeBoundaryInt32) && |
| 1623 BinaryInt32OpInstr::IsSupported(mint_op->op_kind(), | 1589 BinaryInt32OpInstr::IsSupported(mint_op->op_kind(), mint_op->left(), |
| 1624 mint_op->left(), | |
| 1625 mint_op->right())) { | 1590 mint_op->right())) { |
| 1626 BinaryInt32OpInstr* int32_op = | 1591 BinaryInt32OpInstr* int32_op = new BinaryInt32OpInstr( |
| 1627 new BinaryInt32OpInstr(mint_op->op_kind(), | 1592 mint_op->op_kind(), mint_op->left()->CopyWithType(), |
| 1628 mint_op->left()->CopyWithType(), | 1593 mint_op->right()->CopyWithType(), mint_op->DeoptimizationTarget()); |
| 1629 mint_op->right()->CopyWithType(), | |
| 1630 mint_op->DeoptimizationTarget()); | |
| 1631 int32_op->set_range(*mint_op->range()); | 1594 int32_op->set_range(*mint_op->range()); |
| 1632 int32_op->set_can_overflow(false); | 1595 int32_op->set_can_overflow(false); |
| 1633 mint_op->ReplaceWith(int32_op, NULL); | 1596 mint_op->ReplaceWith(int32_op, NULL); |
| 1634 } | 1597 } |
| 1635 } | 1598 } |
| 1636 | 1599 |
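As a minimal sketch of the narrowing criterion used above, with made-up helper names rather than the VM's RangeUtils API: a 64-bit operation may be retyped to a 32-bit one only when its result range and both operand ranges fit into the int32 range.

#include <cstdint>
#include <utility>

using IntRange = std::pair<int64_t, int64_t>;  // [min, max]; hypothetical stand-in for Range

bool FitsInt32(const IntRange& r) {
  return (r.first >= INT32_MIN) && (r.second <= INT32_MAX);
}

bool CanNarrowToInt32(const IntRange& result,
                      const IntRange& lhs,
                      const IntRange& rhs) {
  // All three ranges must fit, mirroring the three Fits checks above.
  return FitsInt32(result) && FitsInt32(lhs) && FitsInt32(rhs);
}

// For example, CanNarrowToInt32({0, 1000}, {0, 100}, {0, 10}) holds, while a result
// range that can reach past INT32_MAX keeps the operation in its 64-bit form.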
| 1637 | 1600 |
| 1638 static void NarrowShiftMintOp(ShiftMintOpInstr* mint_op) { | 1601 static void NarrowShiftMintOp(ShiftMintOpInstr* mint_op) { |
| 1639 if (RangeUtils::Fits(mint_op->range(), RangeBoundary::kRangeBoundaryInt32) && | 1602 if (RangeUtils::Fits(mint_op->range(), RangeBoundary::kRangeBoundaryInt32) && |
| 1640 RangeUtils::Fits(mint_op->left()->definition()->range(), | 1603 RangeUtils::Fits(mint_op->left()->definition()->range(), |
| 1641 RangeBoundary::kRangeBoundaryInt32) && | 1604 RangeBoundary::kRangeBoundaryInt32) && |
| 1642 RangeUtils::Fits(mint_op->right()->definition()->range(), | 1605 RangeUtils::Fits(mint_op->right()->definition()->range(), |
| 1643 RangeBoundary::kRangeBoundaryInt32) && | 1606 RangeBoundary::kRangeBoundaryInt32) && |
| 1644 BinaryInt32OpInstr::IsSupported(mint_op->op_kind(), | 1607 BinaryInt32OpInstr::IsSupported(mint_op->op_kind(), mint_op->left(), |
| 1645 mint_op->left(), | |
| 1646 mint_op->right())) { | 1608 mint_op->right())) { |
| 1647 BinaryInt32OpInstr* int32_op = | 1609 BinaryInt32OpInstr* int32_op = new BinaryInt32OpInstr( |
| 1648 new BinaryInt32OpInstr(mint_op->op_kind(), | 1610 mint_op->op_kind(), mint_op->left()->CopyWithType(), |
| 1649 mint_op->left()->CopyWithType(), | 1611 mint_op->right()->CopyWithType(), mint_op->DeoptimizationTarget()); |
| 1650 mint_op->right()->CopyWithType(), | |
| 1651 mint_op->DeoptimizationTarget()); | |
| 1652 int32_op->set_range(*mint_op->range()); | 1612 int32_op->set_range(*mint_op->range()); |
| 1653 int32_op->set_can_overflow(false); | 1613 int32_op->set_can_overflow(false); |
| 1654 mint_op->ReplaceWith(int32_op, NULL); | 1614 mint_op->ReplaceWith(int32_op, NULL); |
| 1655 } | 1615 } |
| 1656 } | 1616 } |
| 1657 | 1617 |
| 1658 | 1618 |
| 1659 void RangeAnalysis::NarrowMintToInt32() { | 1619 void RangeAnalysis::NarrowMintToInt32() { |
| 1660 for (intptr_t i = 0; i < binary_mint_ops_.length(); i++) { | 1620 for (intptr_t i = 0; i < binary_mint_ops_.length(); i++) { |
| 1661 NarrowBinaryMintOp(binary_mint_ops_[i]); | 1621 NarrowBinaryMintOp(binary_mint_ops_[i]); |
| 1662 } | 1622 } |
| 1663 | 1623 |
| 1664 for (intptr_t i = 0; i < shift_mint_ops_.length(); i++) { | 1624 for (intptr_t i = 0; i < shift_mint_ops_.length(); i++) { |
| 1665 NarrowShiftMintOp(shift_mint_ops_[i]); | 1625 NarrowShiftMintOp(shift_mint_ops_[i]); |
| 1666 } | 1626 } |
| 1667 } | 1627 } |
| 1668 | 1628 |
| 1669 | 1629 |
| 1670 IntegerInstructionSelector::IntegerInstructionSelector(FlowGraph* flow_graph) | 1630 IntegerInstructionSelector::IntegerInstructionSelector(FlowGraph* flow_graph) |
| 1671 : flow_graph_(flow_graph) { | 1631 : flow_graph_(flow_graph) { |
| 1672 ASSERT(flow_graph_ != NULL); | 1632 ASSERT(flow_graph_ != NULL); |
| 1673 zone_ = flow_graph_->zone(); | 1633 zone_ = flow_graph_->zone(); |
| 1674 selected_uint32_defs_ = | 1634 selected_uint32_defs_ = |
| 1675 new(zone_) BitVector(zone_, flow_graph_->current_ssa_temp_index()); | 1635 new (zone_) BitVector(zone_, flow_graph_->current_ssa_temp_index()); |
| 1676 } | 1636 } |
| 1677 | 1637 |
| 1678 | 1638 |
| 1679 void IntegerInstructionSelector::Select() { | 1639 void IntegerInstructionSelector::Select() { |
| 1680 if (FLAG_trace_integer_ir_selection) { | 1640 if (FLAG_trace_integer_ir_selection) { |
| 1681 THR_Print("---- starting integer ir selection -------\n"); | 1641 THR_Print("---- starting integer ir selection -------\n"); |
| 1682 } | 1642 } |
| 1683 FindPotentialUint32Definitions(); | 1643 FindPotentialUint32Definitions(); |
| 1684 FindUint32NarrowingDefinitions(); | 1644 FindUint32NarrowingDefinitions(); |
| 1685 Propagate(); | 1645 Propagate(); |
| 1686 ReplaceInstructions(); | 1646 ReplaceInstructions(); |
| 1687 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { | 1647 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { |
| 1688 THR_Print("---- after integer ir selection -------\n"); | 1648 THR_Print("---- after integer ir selection -------\n"); |
| 1689 FlowGraphPrinter printer(*flow_graph_); | 1649 FlowGraphPrinter printer(*flow_graph_); |
| 1690 printer.PrintBlocks(); | 1650 printer.PrintBlocks(); |
| 1691 } | 1651 } |
| 1692 } | 1652 } |
| 1693 | 1653 |
| 1694 | 1654 |
| 1695 bool IntegerInstructionSelector::IsPotentialUint32Definition(Definition* def) { | 1655 bool IntegerInstructionSelector::IsPotentialUint32Definition(Definition* def) { |
| 1696 // TODO(johnmccutchan): Consider Smi operations, to avoid unnecessary tagging | 1656 // TODO(johnmccutchan): Consider Smi operations, to avoid unnecessary tagging |
| 1697 // & untagging of intermediate results. | 1657 // & untagging of intermediate results. |
| 1698 // TODO(johnmccutchan): Consider phis. | 1658 // TODO(johnmccutchan): Consider phis. |
| 1699 return def->IsBoxInt64() || | 1659 return def->IsBoxInt64() || def->IsUnboxInt64() || def->IsBinaryMintOp() || |
| 1700 def->IsUnboxInt64() || | 1660 def->IsShiftMintOp() || def->IsUnaryMintOp(); |
| 1701 def->IsBinaryMintOp() || | |
| 1702 def->IsShiftMintOp() || | |
| 1703 def->IsUnaryMintOp(); | |
| 1704 } | 1661 } |
| 1705 | 1662 |
| 1706 | 1663 |
| 1707 void IntegerInstructionSelector::FindPotentialUint32Definitions() { | 1664 void IntegerInstructionSelector::FindPotentialUint32Definitions() { |
| 1708 if (FLAG_trace_integer_ir_selection) { | 1665 if (FLAG_trace_integer_ir_selection) { |
| 1709 THR_Print("++++ Finding potential Uint32 definitions:\n"); | 1666 THR_Print("++++ Finding potential Uint32 definitions:\n"); |
| 1710 } | 1667 } |
| 1711 | 1668 |
| 1712 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); | 1669 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator(); |
| 1713 !block_it.Done(); | 1670 !block_it.Done(); block_it.Advance()) { |
| 1714 block_it.Advance()) { | |
| 1715 BlockEntryInstr* block = block_it.Current(); | 1671 BlockEntryInstr* block = block_it.Current(); |
| 1716 | 1672 |
| 1717 for (ForwardInstructionIterator instr_it(block); | 1673 for (ForwardInstructionIterator instr_it(block); !instr_it.Done(); |
| 1718 !instr_it.Done(); | |
| 1719 instr_it.Advance()) { | 1674 instr_it.Advance()) { |
| 1720 Instruction* current = instr_it.Current(); | 1675 Instruction* current = instr_it.Current(); |
| 1721 Definition* defn = current->AsDefinition(); | 1676 Definition* defn = current->AsDefinition(); |
| 1722 if ((defn != NULL) && defn->HasSSATemp()) { | 1677 if ((defn != NULL) && defn->HasSSATemp()) { |
| 1723 if (IsPotentialUint32Definition(defn)) { | 1678 if (IsPotentialUint32Definition(defn)) { |
| 1724 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { | 1679 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { |
| 1725 THR_Print("Adding %s\n", current->ToCString()); | 1680 THR_Print("Adding %s\n", current->ToCString()); |
| 1726 } | 1681 } |
| 1727 potential_uint32_defs_.Add(defn); | 1682 potential_uint32_defs_.Add(defn); |
| 1728 } | 1683 } |
| 1729 } | 1684 } |
| 1730 } | 1685 } |
| 1731 } | 1686 } |
| 1732 } | 1687 } |
| 1733 | 1688 |
| 1734 | 1689 |
| 1735 // BinaryMintOp masks and stores into unsigned typed arrays that truncate the | 1690 // BinaryMintOp masks and stores into unsigned typed arrays that truncate the |
| (...skipping 29 matching lines...) |
| 1765 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { | 1720 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { |
| 1766 THR_Print("Adding %s\n", defn->ToCString()); | 1721 THR_Print("Adding %s\n", defn->ToCString()); |
| 1767 } | 1722 } |
| 1768 selected_uint32_defs_->Add(defn->ssa_temp_index()); | 1723 selected_uint32_defs_->Add(defn->ssa_temp_index()); |
| 1769 } | 1724 } |
| 1770 } | 1725 } |
| 1771 } | 1726 } |
| 1772 | 1727 |
| 1773 | 1728 |
| 1774 bool IntegerInstructionSelector::AllUsesAreUint32Narrowing(Value* list_head) { | 1729 bool IntegerInstructionSelector::AllUsesAreUint32Narrowing(Value* list_head) { |
| 1775 for (Value::Iterator it(list_head); | 1730 for (Value::Iterator it(list_head); !it.Done(); it.Advance()) { |
| 1776 !it.Done(); | |
| 1777 it.Advance()) { | |
| 1778 Value* use = it.Current(); | 1731 Value* use = it.Current(); |
| 1779 Definition* defn = use->instruction()->AsDefinition(); | 1732 Definition* defn = use->instruction()->AsDefinition(); |
| 1780 if ((defn == NULL) || | 1733 if ((defn == NULL) || !defn->HasSSATemp() || |
| 1781 !defn->HasSSATemp() || | |
| 1782 !selected_uint32_defs_->Contains(defn->ssa_temp_index())) { | 1734 !selected_uint32_defs_->Contains(defn->ssa_temp_index())) { |
| 1783 return false; | 1735 return false; |
| 1784 } | 1736 } |
| 1785 } | 1737 } |
| 1786 return true; | 1738 return true; |
| 1787 } | 1739 } |
| 1788 | 1740 |
| 1789 | 1741 |
| 1790 bool IntegerInstructionSelector::CanBecomeUint32(Definition* def) { | 1742 bool IntegerInstructionSelector::CanBecomeUint32(Definition* def) { |
| 1791 ASSERT(IsPotentialUint32Definition(def)); | 1743 ASSERT(IsPotentialUint32Definition(def)); |
| (...skipping 66 matching lines...) |
| 1858 // Should only see mint definitions. | 1810 // Should only see mint definitions. |
| 1859 ASSERT(IsPotentialUint32Definition(def)); | 1811 ASSERT(IsPotentialUint32Definition(def)); |
| 1860 // Should not see constant instructions. | 1812 // Should not see constant instructions. |
| 1861 ASSERT(!def->IsConstant()); | 1813 ASSERT(!def->IsConstant()); |
| 1862 if (def->IsBinaryMintOp()) { | 1814 if (def->IsBinaryMintOp()) { |
| 1863 BinaryMintOpInstr* op = def->AsBinaryMintOp(); | 1815 BinaryMintOpInstr* op = def->AsBinaryMintOp(); |
| 1864 Token::Kind op_kind = op->op_kind(); | 1816 Token::Kind op_kind = op->op_kind(); |
| 1865 Value* left = op->left()->CopyWithType(); | 1817 Value* left = op->left()->CopyWithType(); |
| 1866 Value* right = op->right()->CopyWithType(); | 1818 Value* right = op->right()->CopyWithType(); |
| 1867 intptr_t deopt_id = op->DeoptimizationTarget(); | 1819 intptr_t deopt_id = op->DeoptimizationTarget(); |
| 1868 return new(Z) BinaryUint32OpInstr(op_kind, left, right, deopt_id); | 1820 return new (Z) BinaryUint32OpInstr(op_kind, left, right, deopt_id); |
| 1869 } else if (def->IsBoxInt64()) { | 1821 } else if (def->IsBoxInt64()) { |
| 1870 Value* value = def->AsBoxInt64()->value()->CopyWithType(); | 1822 Value* value = def->AsBoxInt64()->value()->CopyWithType(); |
| 1871 return new(Z) BoxUint32Instr(value); | 1823 return new (Z) BoxUint32Instr(value); |
| 1872 } else if (def->IsUnboxInt64()) { | 1824 } else if (def->IsUnboxInt64()) { |
| 1873 UnboxInstr* unbox = def->AsUnboxInt64(); | 1825 UnboxInstr* unbox = def->AsUnboxInt64(); |
| 1874 Value* value = unbox->value()->CopyWithType(); | 1826 Value* value = unbox->value()->CopyWithType(); |
| 1875 intptr_t deopt_id = unbox->DeoptimizationTarget(); | 1827 intptr_t deopt_id = unbox->DeoptimizationTarget(); |
| 1876 return new(Z) UnboxUint32Instr(value, deopt_id); | 1828 return new (Z) UnboxUint32Instr(value, deopt_id); |
| 1877 } else if (def->IsUnaryMintOp()) { | 1829 } else if (def->IsUnaryMintOp()) { |
| 1878 UnaryMintOpInstr* op = def->AsUnaryMintOp(); | 1830 UnaryMintOpInstr* op = def->AsUnaryMintOp(); |
| 1879 Token::Kind op_kind = op->op_kind(); | 1831 Token::Kind op_kind = op->op_kind(); |
| 1880 Value* value = op->value()->CopyWithType(); | 1832 Value* value = op->value()->CopyWithType(); |
| 1881 intptr_t deopt_id = op->DeoptimizationTarget(); | 1833 intptr_t deopt_id = op->DeoptimizationTarget(); |
| 1882 return new(Z) UnaryUint32OpInstr(op_kind, value, deopt_id); | 1834 return new (Z) UnaryUint32OpInstr(op_kind, value, deopt_id); |
| 1883 } else if (def->IsShiftMintOp()) { | 1835 } else if (def->IsShiftMintOp()) { |
| 1884 ShiftMintOpInstr* op = def->AsShiftMintOp(); | 1836 ShiftMintOpInstr* op = def->AsShiftMintOp(); |
| 1885 Token::Kind op_kind = op->op_kind(); | 1837 Token::Kind op_kind = op->op_kind(); |
| 1886 Value* left = op->left()->CopyWithType(); | 1838 Value* left = op->left()->CopyWithType(); |
| 1887 Value* right = op->right()->CopyWithType(); | 1839 Value* right = op->right()->CopyWithType(); |
| 1888 intptr_t deopt_id = op->DeoptimizationTarget(); | 1840 intptr_t deopt_id = op->DeoptimizationTarget(); |
| 1889 return new(Z) ShiftUint32OpInstr(op_kind, left, right, deopt_id); | 1841 return new (Z) ShiftUint32OpInstr(op_kind, left, right, deopt_id); |
| 1890 } | 1842 } |
| 1891 UNREACHABLE(); | 1843 UNREACHABLE(); |
| 1892 return NULL; | 1844 return NULL; |
| 1893 } | 1845 } |
| 1894 | 1846 |
| 1895 | 1847 |
| 1896 void IntegerInstructionSelector::ReplaceInstructions() { | 1848 void IntegerInstructionSelector::ReplaceInstructions() { |
| 1897 if (FLAG_trace_integer_ir_selection) { | 1849 if (FLAG_trace_integer_ir_selection) { |
| 1898 THR_Print("++++ Replacing instructions:\n"); | 1850 THR_Print("++++ Replacing instructions:\n"); |
| 1899 } | 1851 } |
| 1900 for (intptr_t i = 0; i < potential_uint32_defs_.length(); i++) { | 1852 for (intptr_t i = 0; i < potential_uint32_defs_.length(); i++) { |
| 1901 Definition* defn = potential_uint32_defs_[i]; | 1853 Definition* defn = potential_uint32_defs_[i]; |
| 1902 if (!selected_uint32_defs_->Contains(defn->ssa_temp_index())) { | 1854 if (!selected_uint32_defs_->Contains(defn->ssa_temp_index())) { |
| 1903 // Not a candidate. | 1855 // Not a candidate. |
| 1904 continue; | 1856 continue; |
| 1905 } | 1857 } |
| 1906 Definition* replacement = ConstructReplacementFor(defn); | 1858 Definition* replacement = ConstructReplacementFor(defn); |
| 1907 ASSERT(replacement != NULL); | 1859 ASSERT(replacement != NULL); |
| 1908 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { | 1860 if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) { |
| 1909 THR_Print("Replacing %s with %s\n", defn->ToCString(), | 1861 THR_Print("Replacing %s with %s\n", defn->ToCString(), |
| 1910 replacement->ToCString()); | 1862 replacement->ToCString()); |
| 1911 } | 1863 } |
| 1912 if (!Range::IsUnknown(defn->range())) { | 1864 if (!Range::IsUnknown(defn->range())) { |
| 1913 replacement->set_range(*defn->range()); | 1865 replacement->set_range(*defn->range()); |
| 1914 } | 1866 } |
| 1915 defn->ReplaceWith(replacement, NULL); | 1867 defn->ReplaceWith(replacement, NULL); |
| 1916 ASSERT(flow_graph_->VerifyUseLists()); | 1868 ASSERT(flow_graph_->VerifyUseLists()); |
| 1917 } | 1869 } |
| 1918 } | 1870 } |
| 1919 | 1871 |
| 1920 | 1872 |
| 1921 RangeBoundary RangeBoundary::FromDefinition(Definition* defn, int64_t offs) { | 1873 RangeBoundary RangeBoundary::FromDefinition(Definition* defn, int64_t offs) { |
| 1922 if (defn->IsConstant() && defn->AsConstant()->value().IsSmi()) { | 1874 if (defn->IsConstant() && defn->AsConstant()->value().IsSmi()) { |
| 1923 return FromConstant(Smi::Cast(defn->AsConstant()->value()).Value() + offs); | 1875 return FromConstant(Smi::Cast(defn->AsConstant()->value()).Value() + offs); |
| 1924 } | 1876 } |
| 1925 return RangeBoundary(kSymbol, reinterpret_cast<intptr_t>(defn), offs); | 1877 return RangeBoundary(kSymbol, reinterpret_cast<intptr_t>(defn), offs); |
| 1926 } | 1878 } |
| 1927 | 1879 |
| 1928 | 1880 |
| 1929 RangeBoundary RangeBoundary::LowerBound() const { | 1881 RangeBoundary RangeBoundary::LowerBound() const { |
| 1930 if (IsInfinity()) { | 1882 if (IsInfinity()) { |
| 1931 return NegativeInfinity(); | 1883 return NegativeInfinity(); |
| 1932 } | 1884 } |
| 1933 if (IsConstant()) return *this; | 1885 if (IsConstant()) return *this; |
| 1934 return Add(Range::ConstantMinSmi(symbol()->range()), | 1886 return Add(Range::ConstantMinSmi(symbol()->range()), |
| 1935 RangeBoundary::FromConstant(offset_), | 1887 RangeBoundary::FromConstant(offset_), NegativeInfinity()); |
| 1936 NegativeInfinity()); | |
| 1937 } | 1888 } |
| 1938 | 1889 |
| 1939 | 1890 |
| 1940 RangeBoundary RangeBoundary::UpperBound() const { | 1891 RangeBoundary RangeBoundary::UpperBound() const { |
| 1941 if (IsInfinity()) { | 1892 if (IsInfinity()) { |
| 1942 return PositiveInfinity(); | 1893 return PositiveInfinity(); |
| 1943 } | 1894 } |
| 1944 if (IsConstant()) return *this; | 1895 if (IsConstant()) return *this; |
| 1945 | 1896 |
| 1946 return Add(Range::ConstantMaxSmi(symbol()->range()), | 1897 return Add(Range::ConstantMaxSmi(symbol()->range()), |
| 1947 RangeBoundary::FromConstant(offset_), | 1898 RangeBoundary::FromConstant(offset_), PositiveInfinity()); |
| 1948 PositiveInfinity()); | |
| 1949 } | 1899 } |
| 1950 | 1900 |
| 1951 | 1901 |
| 1952 RangeBoundary RangeBoundary::Add(const RangeBoundary& a, | 1902 RangeBoundary RangeBoundary::Add(const RangeBoundary& a, |
| 1953 const RangeBoundary& b, | 1903 const RangeBoundary& b, |
| 1954 const RangeBoundary& overflow) { | 1904 const RangeBoundary& overflow) { |
| 1955 if (a.IsInfinity() || b.IsInfinity()) return overflow; | 1905 if (a.IsInfinity() || b.IsInfinity()) return overflow; |
| 1956 | 1906 |
| 1957 ASSERT(a.IsConstant() && b.IsConstant()); | 1907 ASSERT(a.IsConstant() && b.IsConstant()); |
| 1958 if (Utils::WillAddOverflow(a.ConstantValue(), b.ConstantValue())) { | 1908 if (Utils::WillAddOverflow(a.ConstantValue(), b.ConstantValue())) { |
| (...skipping 72 matching lines...) |
| 2031 | 1981 |
| 2032 | 1982 |
| 2033 RangeBoundary RangeBoundary::Shl(const RangeBoundary& value_boundary, | 1983 RangeBoundary RangeBoundary::Shl(const RangeBoundary& value_boundary, |
| 2034 int64_t shift_count, | 1984 int64_t shift_count, |
| 2035 const RangeBoundary& overflow) { | 1985 const RangeBoundary& overflow) { |
| 2036 ASSERT(value_boundary.IsConstant()); | 1986 ASSERT(value_boundary.IsConstant()); |
| 2037 ASSERT(shift_count >= 0); | 1987 ASSERT(shift_count >= 0); |
| 2038 int64_t limit = 64 - shift_count; | 1988 int64_t limit = 64 - shift_count; |
| 2039 int64_t value = value_boundary.ConstantValue(); | 1989 int64_t value = value_boundary.ConstantValue(); |
| 2040 | 1990 |
| 2041 if ((value == 0) || | 1991 if ((value == 0) || (shift_count == 0) || |
| 2042 (shift_count == 0) || | |
| 2043 ((limit > 0) && Utils::IsInt(static_cast<int>(limit), value))) { | 1992 ((limit > 0) && Utils::IsInt(static_cast<int>(limit), value))) { |
| 2044 // Result stays in 64 bit range. | 1993 // Result stays in 64 bit range. |
| 2045 int64_t result = value << shift_count; | 1994 int64_t result = value << shift_count; |
| 2046 return RangeBoundary(result); | 1995 return RangeBoundary(result); |
| 2047 } | 1996 } |
| 2048 | 1997 |
| 2049 return overflow; | 1998 return overflow; |
| 2050 } | 1999 } |
| 2051 | 2000 |
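The (limit > 0) && Utils::IsInt(limit, value) test above asks whether the constant fits in (64 - shift_count) signed bits, which is exactly the condition under which the shift stays inside the int64 range. The helper below is an assumed restatement of that check for illustration only, not the VM's Utils::IsInt.

#include <cassert>
#include <cstdint>

// Assumed semantics: value is representable in 'bits' two's-complement bits.
bool FitsInSignedBits(int bits, int64_t value) {
  const int64_t limit = int64_t{1} << (bits - 1);
  return (value >= -limit) && (value < limit);
}

int main() {
  // shift_count = 60, so limit = 64 - 60 = 4 bits.
  assert(FitsInSignedBits(4, 3));   // 3 << 60 still fits in int64
  assert(!FitsInSignedBits(4, 9));  // 9 << 60 would overflow, so the overflow
  return 0;                         // boundary is returned instead
}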
| 2052 | 2001 |
| (...skipping 126 matching lines...) |
| 2179 } | 2128 } |
| 2180 | 2129 |
| 2181 | 2130 |
| 2182 RangeBoundary RangeBoundary::JoinMin(RangeBoundary a, | 2131 RangeBoundary RangeBoundary::JoinMin(RangeBoundary a, |
| 2183 RangeBoundary b, | 2132 RangeBoundary b, |
| 2184 RangeBoundary::RangeSize size) { | 2133 RangeBoundary::RangeSize size) { |
| 2185 if (a.Equals(b)) { | 2134 if (a.Equals(b)) { |
| 2186 return b; | 2135 return b; |
| 2187 } | 2136 } |
| 2188 | 2137 |
| 2189 if (CanonicalizeForComparison(&a, | 2138 if (CanonicalizeForComparison(&a, &b, &CanonicalizeMinBoundary, |
| 2190 &b, | |
| 2191 &CanonicalizeMinBoundary, | |
| 2192 RangeBoundary::NegativeInfinity())) { | 2139 RangeBoundary::NegativeInfinity())) { |
| 2193 return (a.offset() <= b.offset()) ? a : b; | 2140 return (a.offset() <= b.offset()) ? a : b; |
| 2194 } | 2141 } |
| 2195 | 2142 |
| 2196 const int64_t inf_a = a.LowerBound(size); | 2143 const int64_t inf_a = a.LowerBound(size); |
| 2197 const int64_t inf_b = b.LowerBound(size); | 2144 const int64_t inf_b = b.LowerBound(size); |
| 2198 const int64_t sup_a = a.UpperBound(size); | 2145 const int64_t sup_a = a.UpperBound(size); |
| 2199 const int64_t sup_b = b.UpperBound(size); | 2146 const int64_t sup_b = b.UpperBound(size); |
| 2200 | 2147 |
| 2201 if ((sup_a <= inf_b) && !a.LowerBound().Overflowed(size)) { | 2148 if ((sup_a <= inf_b) && !a.LowerBound().Overflowed(size)) { |
| 2202 return a; | 2149 return a; |
| 2203 } else if ((sup_b <= inf_a) && !b.LowerBound().Overflowed(size)) { | 2150 } else if ((sup_b <= inf_a) && !b.LowerBound().Overflowed(size)) { |
| 2204 return b; | 2151 return b; |
| 2205 } else { | 2152 } else { |
| 2206 return RangeBoundary::FromConstant(Utils::Minimum(inf_a, inf_b)); | 2153 return RangeBoundary::FromConstant(Utils::Minimum(inf_a, inf_b)); |
| 2207 } | 2154 } |
| 2208 } | 2155 } |
| 2209 | 2156 |
| 2210 | 2157 |
| 2211 RangeBoundary RangeBoundary::JoinMax(RangeBoundary a, | 2158 RangeBoundary RangeBoundary::JoinMax(RangeBoundary a, |
| 2212 RangeBoundary b, | 2159 RangeBoundary b, |
| 2213 RangeBoundary::RangeSize size) { | 2160 RangeBoundary::RangeSize size) { |
| 2214 if (a.Equals(b)) { | 2161 if (a.Equals(b)) { |
| 2215 return b; | 2162 return b; |
| 2216 } | 2163 } |
| 2217 | 2164 |
| 2218 if (CanonicalizeForComparison(&a, | 2165 if (CanonicalizeForComparison(&a, &b, &CanonicalizeMaxBoundary, |
| 2219 &b, | |
| 2220 &CanonicalizeMaxBoundary, | |
| 2221 RangeBoundary::PositiveInfinity())) { | 2166 RangeBoundary::PositiveInfinity())) { |
| 2222 return (a.offset() >= b.offset()) ? a : b; | 2167 return (a.offset() >= b.offset()) ? a : b; |
| 2223 } | 2168 } |
| 2224 | 2169 |
| 2225 const int64_t inf_a = a.LowerBound(size); | 2170 const int64_t inf_a = a.LowerBound(size); |
| 2226 const int64_t inf_b = b.LowerBound(size); | 2171 const int64_t inf_b = b.LowerBound(size); |
| 2227 const int64_t sup_a = a.UpperBound(size); | 2172 const int64_t sup_a = a.UpperBound(size); |
| 2228 const int64_t sup_b = b.UpperBound(size); | 2173 const int64_t sup_b = b.UpperBound(size); |
| 2229 | 2174 |
| 2230 if ((sup_a <= inf_b) && !b.UpperBound().Overflowed(size)) { | 2175 if ((sup_a <= inf_b) && !b.UpperBound().Overflowed(size)) { |
| (...skipping 13 matching lines...) |
| 2244 if (a.Equals(b)) { | 2189 if (a.Equals(b)) { |
| 2245 return a; | 2190 return a; |
| 2246 } | 2191 } |
| 2247 | 2192 |
| 2248 if (a.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) { | 2193 if (a.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) { |
| 2249 return b; | 2194 return b; |
| 2250 } else if (b.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) { | 2195 } else if (b.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) { |
| 2251 return a; | 2196 return a; |
| 2252 } | 2197 } |
| 2253 | 2198 |
| 2254 if (CanonicalizeForComparison(&a, | 2199 if (CanonicalizeForComparison(&a, &b, &CanonicalizeMinBoundary, |
| 2255 &b, | |
| 2256 &CanonicalizeMinBoundary, | |
| 2257 RangeBoundary::NegativeInfinity())) { | 2200 RangeBoundary::NegativeInfinity())) { |
| 2258 return (a.offset() >= b.offset()) ? a : b; | 2201 return (a.offset() >= b.offset()) ? a : b; |
| 2259 } | 2202 } |
| 2260 | 2203 |
| 2261 const int64_t inf_a = a.SmiLowerBound(); | 2204 const int64_t inf_a = a.SmiLowerBound(); |
| 2262 const int64_t inf_b = b.SmiLowerBound(); | 2205 const int64_t inf_b = b.SmiLowerBound(); |
| 2263 | 2206 |
| 2264 return (inf_a >= inf_b) ? a : b; | 2207 return (inf_a >= inf_b) ? a : b; |
| 2265 } | 2208 } |
| 2266 | 2209 |
| 2267 | 2210 |
| 2268 RangeBoundary RangeBoundary::IntersectionMax(RangeBoundary a, RangeBoundary b) { | 2211 RangeBoundary RangeBoundary::IntersectionMax(RangeBoundary a, RangeBoundary b) { |
| 2269 ASSERT(!a.IsNegativeInfinity() && !b.IsNegativeInfinity()); | 2212 ASSERT(!a.IsNegativeInfinity() && !b.IsNegativeInfinity()); |
| 2270 ASSERT(!a.IsUnknown() && !b.IsUnknown()); | 2213 ASSERT(!a.IsUnknown() && !b.IsUnknown()); |
| 2271 | 2214 |
| 2272 if (a.Equals(b)) { | 2215 if (a.Equals(b)) { |
| 2273 return a; | 2216 return a; |
| 2274 } | 2217 } |
| 2275 | 2218 |
| 2276 if (a.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) { | 2219 if (a.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) { |
| 2277 return b; | 2220 return b; |
| 2278 } else if (b.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) { | 2221 } else if (b.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) { |
| 2279 return a; | 2222 return a; |
| 2280 } | 2223 } |
| 2281 | 2224 |
| 2282 if (CanonicalizeForComparison(&a, | 2225 if (CanonicalizeForComparison(&a, &b, &CanonicalizeMaxBoundary, |
| 2283 &b, | |
| 2284 &CanonicalizeMaxBoundary, | |
| 2285 RangeBoundary::PositiveInfinity())) { | 2226 RangeBoundary::PositiveInfinity())) { |
| 2286 return (a.offset() <= b.offset()) ? a : b; | 2227 return (a.offset() <= b.offset()) ? a : b; |
| 2287 } | 2228 } |
| 2288 | 2229 |
| 2289 const int64_t sup_a = a.SmiUpperBound(); | 2230 const int64_t sup_a = a.SmiUpperBound(); |
| 2290 const int64_t sup_b = b.SmiUpperBound(); | 2231 const int64_t sup_b = b.SmiUpperBound(); |
| 2291 | 2232 |
| 2292 return (sup_a <= sup_b) ? a : b; | 2233 return (sup_a <= sup_b) ? a : b; |
| 2293 } | 2234 } |
| 2294 | 2235 |
| 2295 | 2236 |
| 2296 int64_t RangeBoundary::ConstantValue() const { | 2237 int64_t RangeBoundary::ConstantValue() const { |
| 2297 ASSERT(IsConstant()); | 2238 ASSERT(IsConstant()); |
| 2298 return value_; | 2239 return value_; |
| 2299 } | 2240 } |
| 2300 | 2241 |
| 2301 | 2242 |
| 2302 bool Range::IsPositive() const { | 2243 bool Range::IsPositive() const { |
| 2303 return OnlyGreaterThanOrEqualTo(0); | 2244 return OnlyGreaterThanOrEqualTo(0); |
| 2304 } | 2245 } |
| 2305 | 2246 |
| 2306 | 2247 |
| 2307 bool Range::OnlyLessThanOrEqualTo(int64_t val) const { | 2248 bool Range::OnlyLessThanOrEqualTo(int64_t val) const { |
| 2308 const RangeBoundary upper_bound = max().UpperBound(); | 2249 const RangeBoundary upper_bound = max().UpperBound(); |
| 2309 return !upper_bound.IsPositiveInfinity() && | 2250 return !upper_bound.IsPositiveInfinity() && |
| 2310 (upper_bound.ConstantValue() <= val); | 2251 (upper_bound.ConstantValue() <= val); |
| 2311 } | 2252 } |
| 2312 | 2253 |
| 2313 | 2254 |
| 2314 bool Range::OnlyGreaterThanOrEqualTo(int64_t val) const { | 2255 bool Range::OnlyGreaterThanOrEqualTo(int64_t val) const { |
| 2315 const RangeBoundary lower_bound = min().LowerBound(); | 2256 const RangeBoundary lower_bound = min().LowerBound(); |
| 2316 return !lower_bound.IsNegativeInfinity() && | 2257 return !lower_bound.IsNegativeInfinity() && |
| 2317 (lower_bound.ConstantValue() >= val); | 2258 (lower_bound.ConstantValue() >= val); |
| 2318 } | 2259 } |
| 2319 | 2260 |
| 2320 | 2261 |
| 2321 // Inclusive. | 2262 // Inclusive. |
| 2322 bool Range::IsWithin(int64_t min_int, int64_t max_int) const { | 2263 bool Range::IsWithin(int64_t min_int, int64_t max_int) const { |
| 2323 return OnlyGreaterThanOrEqualTo(min_int) && | 2264 return OnlyGreaterThanOrEqualTo(min_int) && OnlyLessThanOrEqualTo(max_int); |
| 2324 OnlyLessThanOrEqualTo(max_int); | |
| 2325 } | 2265 } |
| 2326 | 2266 |
| 2327 | 2267 |
| 2328 bool Range::Overlaps(int64_t min_int, int64_t max_int) const { | 2268 bool Range::Overlaps(int64_t min_int, int64_t max_int) const { |
| 2329 RangeBoundary lower = min().LowerBound(); | 2269 RangeBoundary lower = min().LowerBound(); |
| 2330 RangeBoundary upper = max().UpperBound(); | 2270 RangeBoundary upper = max().UpperBound(); |
| 2331 const int64_t this_min = lower.IsNegativeInfinity() ? | 2271 const int64_t this_min = |
| 2332 RangeBoundary::kMin : lower.ConstantValue(); | 2272 lower.IsNegativeInfinity() ? RangeBoundary::kMin : lower.ConstantValue(); |
| 2333 const int64_t this_max = upper.IsPositiveInfinity() ? | 2273 const int64_t this_max = |
| 2334 RangeBoundary::kMax : upper.ConstantValue(); | 2274 upper.IsPositiveInfinity() ? RangeBoundary::kMax : upper.ConstantValue(); |
| 2335 if ((this_min <= min_int) && (min_int <= this_max)) return true; | 2275 if ((this_min <= min_int) && (min_int <= this_max)) return true; |
| 2336 if ((this_min <= max_int) && (max_int <= this_max)) return true; | 2276 if ((this_min <= max_int) && (max_int <= this_max)) return true; |
| 2337 if ((min_int < this_min) && (max_int > this_max)) return true; | 2277 if ((min_int < this_min) && (max_int > this_max)) return true; |
| 2338 return false; | 2278 return false; |
| 2339 } | 2279 } |
| 2340 | 2280 |
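The three comparisons above cover, in order: min_int falls inside this range, max_int falls inside this range, or [min_int, max_int] strictly encloses it; together they amount to "the two closed intervals intersect". A standalone restatement of that reading, for illustration only:

#include <cassert>
#include <cstdint>

// The same three-case test, written for plain closed intervals.
bool IntervalsOverlap(int64_t lo, int64_t hi, int64_t min_int, int64_t max_int) {
  if ((lo <= min_int) && (min_int <= hi)) return true;  // min_int inside [lo, hi]
  if ((lo <= max_int) && (max_int <= hi)) return true;  // max_int inside [lo, hi]
  if ((min_int < lo) && (max_int > hi)) return true;    // [min_int, max_int] encloses [lo, hi]
  return false;
}

int main() {
  assert(IntervalsOverlap(0, 10, 5, 20));    // partial overlap
  assert(IntervalsOverlap(0, 10, -5, 20));   // enclosing interval
  assert(!IntervalsOverlap(0, 10, 11, 20));  // disjoint intervals
  return 0;
}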
| 2341 | 2281 |
| 2342 bool Range::IsUnsatisfiable() const { | 2282 bool Range::IsUnsatisfiable() const { |
| 2343 // Infinity case: [+inf, ...] || [..., -inf] | 2283 // Infinity case: [+inf, ...] || [..., -inf] |
| 2344 if (min().IsPositiveInfinity() || max().IsNegativeInfinity()) { | 2284 if (min().IsPositiveInfinity() || max().IsNegativeInfinity()) { |
| (...skipping 26 matching lines...) |
| 2371 RangeBoundary left_max = Range::ConstantMax(left); | 2311 RangeBoundary left_max = Range::ConstantMax(left); |
| 2372 RangeBoundary left_min = Range::ConstantMin(left); | 2312 RangeBoundary left_min = Range::ConstantMin(left); |
| 2373 // A negative shift count always deoptimizes (and throws), so the minimum | 2313 // A negative shift count always deoptimizes (and throws), so the minimum |
| 2374 // shift count is zero. | 2314 // shift count is zero. |
| 2375 int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(), | 2315 int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(), |
| 2376 static_cast<int64_t>(0)); | 2316 static_cast<int64_t>(0)); |
| 2377 int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(), | 2317 int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(), |
| 2378 static_cast<int64_t>(0)); | 2318 static_cast<int64_t>(0)); |
| 2379 | 2319 |
| 2380 *result_min = RangeBoundary::Shl( | 2320 *result_min = RangeBoundary::Shl( |
| 2381 left_min, | 2321 left_min, left_min.ConstantValue() > 0 ? right_min : right_max, |
| 2382 left_min.ConstantValue() > 0 ? right_min : right_max, | 2322 left_min.ConstantValue() > 0 ? RangeBoundary::PositiveInfinity() |
| 2383 left_min.ConstantValue() > 0 | 2323 : RangeBoundary::NegativeInfinity()); |
| 2384 ? RangeBoundary::PositiveInfinity() | |
| 2385 : RangeBoundary::NegativeInfinity()); | |
| 2386 | 2324 |
| 2387 *result_max = RangeBoundary::Shl( | 2325 *result_max = RangeBoundary::Shl( |
| 2388 left_max, | 2326 left_max, left_max.ConstantValue() > 0 ? right_max : right_min, |
| 2389 left_max.ConstantValue() > 0 ? right_max : right_min, | 2327 left_max.ConstantValue() > 0 ? RangeBoundary::PositiveInfinity() |
| 2390 left_max.ConstantValue() > 0 | 2328 : RangeBoundary::NegativeInfinity()); |
| 2391 ? RangeBoundary::PositiveInfinity() | |
| 2392 : RangeBoundary::NegativeInfinity()); | |
| 2393 } | 2329 } |
| 2394 | 2330 |
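A worked example of the min/max selection above, using arbitrary ranges: for left = [-3, 5] and a shift count range of [1, 2], the minimum comes from shifting the most negative value by the largest count and the maximum from shifting the most positive value by the largest count, giving [-12, 20]. The check below multiplies by 1 << s instead of shifting a negative value, since the latter is not portable C++.

#include <cassert>
#include <cstdint>

int main() {
  // left range [-3, 5], shift count range [1, 2] (arbitrary example values).
  const int64_t result_min = -3 * (int64_t{1} << 2);  // most negative value, largest shift
  const int64_t result_max = 5 * (int64_t{1} << 2);   // most positive value, largest shift
  assert((result_min == -12) && (result_max == 20));
  return 0;
}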
| 2395 | 2331 |
| 2396 void Range::Shr(const Range* left, | 2332 void Range::Shr(const Range* left, |
| 2397 const Range* right, | 2333 const Range* right, |
| 2398 RangeBoundary* result_min, | 2334 RangeBoundary* result_min, |
| 2399 RangeBoundary* result_max) { | 2335 RangeBoundary* result_max) { |
| 2400 RangeBoundary left_max = Range::ConstantMax(left); | 2336 RangeBoundary left_max = Range::ConstantMax(left); |
| 2401 RangeBoundary left_min = Range::ConstantMin(left); | 2337 RangeBoundary left_min = Range::ConstantMin(left); |
| 2402 // A negative shift count always deoptimizes (and throws), so the minimum | 2338 // A negative shift count always deoptimizes (and throws), so the minimum |
| 2403 // shift count is zero. | 2339 // shift count is zero. |
| 2404 int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(), | 2340 int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(), |
| 2405 static_cast<int64_t>(0)); | 2341 static_cast<int64_t>(0)); |
| 2406 int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(), | 2342 int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(), |
| 2407 static_cast<int64_t>(0)); | 2343 static_cast<int64_t>(0)); |
| 2408 | 2344 |
| 2409 *result_min = RangeBoundary::Shr( | 2345 *result_min = RangeBoundary::Shr( |
| 2410 left_min, | 2346 left_min, left_min.ConstantValue() > 0 ? right_max : right_min); |
| 2411 left_min.ConstantValue() > 0 ? right_max : right_min); | |
| 2412 | 2347 |
| 2413 *result_max = RangeBoundary::Shr( | 2348 *result_max = RangeBoundary::Shr( |
| 2414 left_max, | 2349 left_max, left_max.ConstantValue() > 0 ? right_min : right_max); |
| 2415 left_max.ConstantValue() > 0 ? right_min : right_max); | |
| 2416 } | 2350 } |
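
Note on Range::Shr above: shifting right moves values toward zero, so the shift-count selection is the opposite of Shl, and no overflow handling is needed. A minimal standalone sketch of one bound, assuming a non-negative shift count (ShrBound is a hypothetical name, not VM API):

    #include <cstdint>

    // Hypothetical helper (not VM code): arithmetic shift right of one bound.
    int64_t ShrBound(int64_t value, int64_t shift) {
      if (shift > 62) {
        return value < 0 ? -1 : 0;  // All value bits shifted out.
      }
      return value >> shift;  // Arithmetic shift for signed operands in practice.
    }

For example, with a left range of [-16, 100] and a shift range of [1, 3], the selection above gives min = ShrBound(-16, 1) = -8 and max = ShrBound(100, 1) = 50.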
| 2417 | 2351 |
| 2418 | 2352 |
| 2419 void Range::And(const Range* left_range, | 2353 void Range::And(const Range* left_range, |
| 2420 const Range* right_range, | 2354 const Range* right_range, |
| 2421 RangeBoundary* result_min, | 2355 RangeBoundary* result_min, |
| 2422 RangeBoundary* result_max) { | 2356 RangeBoundary* result_max) { |
| 2423 ASSERT(left_range != NULL); | 2357 ASSERT(left_range != NULL); |
| 2424 ASSERT(right_range != NULL); | 2358 ASSERT(right_range != NULL); |
| 2425 ASSERT(result_min != NULL); | 2359 ASSERT(result_min != NULL); |
| (...skipping 19 matching lines...) |
| 2445 const int64_t min = Range::ConstantMin(range).ConstantValue(); | 2379 const int64_t min = Range::ConstantMin(range).ConstantValue(); |
| 2446 const int64_t max = Range::ConstantMax(range).ConstantValue(); | 2380 const int64_t max = Range::ConstantMax(range).ConstantValue(); |
| 2447 return Utils::Maximum(Utils::BitLength(min), Utils::BitLength(max)); | 2381 return Utils::Maximum(Utils::BitLength(min), Utils::BitLength(max)); |
| 2448 } | 2382 } |
| 2449 | 2383 |
| 2450 | 2384 |
| 2451 void Range::BitwiseOp(const Range* left_range, | 2385 void Range::BitwiseOp(const Range* left_range, |
| 2452 const Range* right_range, | 2386 const Range* right_range, |
| 2453 RangeBoundary* result_min, | 2387 RangeBoundary* result_min, |
| 2454 RangeBoundary* result_max) { | 2388 RangeBoundary* result_max) { |
| 2455 const int bitsize = | 2389 const int bitsize = Utils::Maximum(BitSize(left_range), BitSize(right_range)); |
| 2456 Utils::Maximum(BitSize(left_range), BitSize(right_range)); | |
| 2457 | 2390 |
| 2458 if (left_range->IsPositive() && right_range->IsPositive()) { | 2391 if (left_range->IsPositive() && right_range->IsPositive()) { |
| 2459 *result_min = RangeBoundary::FromConstant(0); | 2392 *result_min = RangeBoundary::FromConstant(0); |
| 2460 } else { | 2393 } else { |
| 2461 *result_min = RangeBoundary::FromConstant( | 2394 *result_min = |
| 2462 static_cast<int64_t>(-1) << bitsize); | 2395 RangeBoundary::FromConstant(static_cast<int64_t>(-1) << bitsize); |
| 2463 } | 2396 } |
| 2464 | 2397 |
| 2465 *result_max = RangeBoundary::FromConstant( | 2398 *result_max = |
| 2466 (static_cast<uint64_t>(1) << bitsize) - 1); | 2399 RangeBoundary::FromConstant((static_cast<uint64_t>(1) << bitsize) - 1); |
| 2467 } | 2400 } |
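
Note on Range::BitwiseOp above: any AND/OR/XOR result fits in one more bit than the wider operand, so the conservative result range is [-2^bitsize, 2^bitsize - 1], tightened to [0, 2^bitsize - 1] when both operands are non-negative. A standalone sketch of the same rule, assuming bitsize < 63 for brevity (the code above covers the full 64-bit case with unsigned arithmetic); names are hypothetical:

    #include <algorithm>
    #include <cstdint>

    // Hypothetical helper (not VM code): bits needed for the two's-complement
    // representation, excluding the sign bit, so BitLength(-1) == 0.
    static int BitLength(int64_t v) {
      if (v < 0) v = ~v;  // A negative value needs as many bits as its complement.
      int bits = 0;
      while (v != 0) {
        v >>= 1;
        bits++;
      }
      return bits;
    }

    // Conservative bounds for any bitwise AND/OR/XOR of values from the ranges.
    void BitwiseOpRange(int64_t a_min, int64_t a_max,
                        int64_t b_min, int64_t b_max,
                        int64_t* out_min, int64_t* out_max) {
      const int bitsize =
          std::max(std::max(BitLength(a_min), BitLength(a_max)),
                   std::max(BitLength(b_min), BitLength(b_max)));
      const bool both_non_negative = (a_min >= 0) && (b_min >= 0);
      *out_min = both_non_negative ? 0 : -(static_cast<int64_t>(1) << bitsize);
      *out_max = (static_cast<int64_t>(1) << bitsize) - 1;
    }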
| 2468 | 2401 |
| 2469 | 2402 |
| 2470 static bool IsArrayLength(Definition* defn) { | 2403 static bool IsArrayLength(Definition* defn) { |
| 2471 if (defn == NULL) { | 2404 if (defn == NULL) { |
| 2472 return false; | 2405 return false; |
| 2473 } | 2406 } |
| 2474 LoadFieldInstr* load = UnwrapConstraint(defn)->AsLoadField(); | 2407 LoadFieldInstr* load = UnwrapConstraint(defn)->AsLoadField(); |
| 2475 return (load != NULL) && load->IsImmutableLengthLoad(); | 2408 return (load != NULL) && load->IsImmutableLengthLoad(); |
| 2476 } | 2409 } |
| 2477 | 2410 |
| 2478 | 2411 |
| 2479 void Range::Add(const Range* left_range, | 2412 void Range::Add(const Range* left_range, |
| 2480 const Range* right_range, | 2413 const Range* right_range, |
| 2481 RangeBoundary* result_min, | 2414 RangeBoundary* result_min, |
| 2482 RangeBoundary* result_max, | 2415 RangeBoundary* result_max, |
| 2483 Definition* left_defn) { | 2416 Definition* left_defn) { |
| 2484 ASSERT(left_range != NULL); | 2417 ASSERT(left_range != NULL); |
| 2485 ASSERT(right_range != NULL); | 2418 ASSERT(right_range != NULL); |
| 2486 ASSERT(result_min != NULL); | 2419 ASSERT(result_min != NULL); |
| 2487 ASSERT(result_max != NULL); | 2420 ASSERT(result_max != NULL); |
| 2488 | 2421 |
| 2489 RangeBoundary left_min = | 2422 RangeBoundary left_min = IsArrayLength(left_defn) |
| 2490 IsArrayLength(left_defn) ? | 2423 ? RangeBoundary::FromDefinition(left_defn) |
| 2491 RangeBoundary::FromDefinition(left_defn) : left_range->min(); | 2424 : left_range->min(); |
| 2492 | 2425 |
| 2493 RangeBoundary left_max = | 2426 RangeBoundary left_max = IsArrayLength(left_defn) |
| 2494 IsArrayLength(left_defn) ? | 2427 ? RangeBoundary::FromDefinition(left_defn) |
| 2495 RangeBoundary::FromDefinition(left_defn) : left_range->max(); | 2428 : left_range->max(); |
| 2496 | 2429 |
| 2497 if (!RangeBoundary::SymbolicAdd(left_min, right_range->min(), result_min)) { | 2430 if (!RangeBoundary::SymbolicAdd(left_min, right_range->min(), result_min)) { |
| 2498 *result_min = RangeBoundary::Add(left_range->min().LowerBound(), | 2431 *result_min = RangeBoundary::Add(left_range->min().LowerBound(), |
| 2499 right_range->min().LowerBound(), | 2432 right_range->min().LowerBound(), |
| 2500 RangeBoundary::NegativeInfinity()); | 2433 RangeBoundary::NegativeInfinity()); |
| 2501 } | 2434 } |
| 2502 if (!RangeBoundary::SymbolicAdd(left_max, right_range->max(), result_max)) { | 2435 if (!RangeBoundary::SymbolicAdd(left_max, right_range->max(), result_max)) { |
| 2503 *result_max = RangeBoundary::Add(right_range->max().UpperBound(), | 2436 *result_max = RangeBoundary::Add(right_range->max().UpperBound(), |
| 2504 left_range->max().UpperBound(), | 2437 left_range->max().UpperBound(), |
| 2505 RangeBoundary::PositiveInfinity()); | 2438 RangeBoundary::PositiveInfinity()); |
| 2506 } | 2439 } |
| 2507 } | 2440 } |
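
Note on Range::Add (and Range::Sub below): when the left operand is an immutable array-length load, the bound is kept symbolic so a later pass can reason about expressions like length - 1; only when no symbolic bound can be formed does it fall back to constant interval arithmetic that saturates to the infinities. A minimal sketch of that constant fallback only, with hypothetical names; for subtraction the bounds cross (min is a_min - b_max, max is a_max - b_min):

    #include <cstdint>
    #include <limits>

    // Hypothetical helper (not VM code): saturating addition standing in for
    // the +/- infinity fallback.
    int64_t SaturatingAdd(int64_t a, int64_t b) {
      if (b > 0 && a > std::numeric_limits<int64_t>::max() - b) {
        return std::numeric_limits<int64_t>::max();  // Acts as +infinity.
      }
      if (b < 0 && a < std::numeric_limits<int64_t>::min() - b) {
        return std::numeric_limits<int64_t>::min();  // Acts as -infinity.
      }
      return a + b;
    }

    // [a_min, a_max] + [b_min, b_max]: minima add to the minimum, maxima to
    // the maximum.
    void AddRange(int64_t a_min, int64_t a_max, int64_t b_min, int64_t b_max,
                  int64_t* out_min, int64_t* out_max) {
      *out_min = SaturatingAdd(a_min, b_min);
      *out_max = SaturatingAdd(a_max, b_max);
    }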
| 2508 | 2441 |
| 2509 | 2442 |
| 2510 void Range::Sub(const Range* left_range, | 2443 void Range::Sub(const Range* left_range, |
| 2511 const Range* right_range, | 2444 const Range* right_range, |
| 2512 RangeBoundary* result_min, | 2445 RangeBoundary* result_min, |
| 2513 RangeBoundary* result_max, | 2446 RangeBoundary* result_max, |
| 2514 Definition* left_defn) { | 2447 Definition* left_defn) { |
| 2515 ASSERT(left_range != NULL); | 2448 ASSERT(left_range != NULL); |
| 2516 ASSERT(right_range != NULL); | 2449 ASSERT(right_range != NULL); |
| 2517 ASSERT(result_min != NULL); | 2450 ASSERT(result_min != NULL); |
| 2518 ASSERT(result_max != NULL); | 2451 ASSERT(result_max != NULL); |
| 2519 | 2452 |
| 2520 RangeBoundary left_min = | 2453 RangeBoundary left_min = IsArrayLength(left_defn) |
| 2521 IsArrayLength(left_defn) ? | 2454 ? RangeBoundary::FromDefinition(left_defn) |
| 2522 RangeBoundary::FromDefinition(left_defn) : left_range->min(); | 2455 : left_range->min(); |
| 2523 | 2456 |
| 2524 RangeBoundary left_max = | 2457 RangeBoundary left_max = IsArrayLength(left_defn) |
| 2525 IsArrayLength(left_defn) ? | 2458 ? RangeBoundary::FromDefinition(left_defn) |
| 2526 RangeBoundary::FromDefinition(left_defn) : left_range->max(); | 2459 : left_range->max(); |
| 2527 | 2460 |
| 2528 if (!RangeBoundary::SymbolicSub(left_min, right_range->max(), result_min)) { | 2461 if (!RangeBoundary::SymbolicSub(left_min, right_range->max(), result_min)) { |
| 2529 *result_min = RangeBoundary::Sub(left_range->min().LowerBound(), | 2462 *result_min = RangeBoundary::Sub(left_range->min().LowerBound(), |
| 2530 right_range->max().UpperBound(), | 2463 right_range->max().UpperBound(), |
| 2531 RangeBoundary::NegativeInfinity()); | 2464 RangeBoundary::NegativeInfinity()); |
| 2532 } | 2465 } |
| 2533 if (!RangeBoundary::SymbolicSub(left_max, right_range->min(), result_max)) { | 2466 if (!RangeBoundary::SymbolicSub(left_max, right_range->min(), result_max)) { |
| 2534 *result_max = RangeBoundary::Sub(left_range->max().UpperBound(), | 2467 *result_max = RangeBoundary::Sub(left_range->max().UpperBound(), |
| 2535 right_range->min().LowerBound(), | 2468 right_range->min().LowerBound(), |
| 2536 RangeBoundary::PositiveInfinity()); | 2469 RangeBoundary::PositiveInfinity()); |
| 2537 } | 2470 } |
| 2538 } | 2471 } |
| 2539 | 2472 |
| 2540 | 2473 |
| 2541 void Range::Mul(const Range* left_range, | 2474 void Range::Mul(const Range* left_range, |
| (...skipping 266 matching lines...) |
| 2808 UNREACHABLE(); | 2741 UNREACHABLE(); |
| 2809 return NULL; | 2742 return NULL; |
| 2810 } | 2743 } |
| 2811 } | 2744 } |
| 2812 | 2745 |
| 2813 | 2746 |
| 2814 void PhiInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2747 void PhiInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 2815 const RangeBoundary::RangeSize size = RangeSizeForPhi(this); | 2748 const RangeBoundary::RangeSize size = RangeSizeForPhi(this); |
| 2816 for (intptr_t i = 0; i < InputCount(); i++) { | 2749 for (intptr_t i = 0; i < InputCount(); i++) { |
| 2817 Value* input = InputAt(i); | 2750 Value* input = InputAt(i); |
| 2818 Join(range, | 2751 Join(range, input->definition(), GetInputRange(analysis, size, input), |
| 2819 input->definition(), | |
| 2820 GetInputRange(analysis, size, input), | |
| 2821 size); | 2752 size); |
| 2822 } | 2753 } |
| 2823 | 2754 |
| 2824 BlockEntryInstr* phi_block = GetBlock(); | 2755 BlockEntryInstr* phi_block = GetBlock(); |
| 2825 range->set_min(EnsureAcyclicSymbol( | 2756 range->set_min( |
| 2826 phi_block, range->min(), RangeBoundary::MinSmi())); | 2757 EnsureAcyclicSymbol(phi_block, range->min(), RangeBoundary::MinSmi())); |
| 2827 range->set_max(EnsureAcyclicSymbol( | 2758 range->set_max( |
| 2828 phi_block, range->max(), RangeBoundary::MaxSmi())); | 2759 EnsureAcyclicSymbol(phi_block, range->max(), RangeBoundary::MaxSmi())); |
| 2829 } | 2760 } |
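
Note on PhiInstr::InferRange above: a phi's range is the join (interval union) of its input ranges at the phi's representation size; the EnsureAcyclicSymbol calls then appear to widen any symbolic bound that would create a cycle through the phi's own block to the generic smi limits. The sketch below shows only the plain interval join, with hypothetical names:

    #include <algorithm>
    #include <cstdint>

    // Hypothetical helper (not VM code): fold one input interval into the
    // accumulated phi range. A union can only widen the range.
    void JoinRange(int64_t in_min, int64_t in_max,
                   int64_t* acc_min, int64_t* acc_max) {
      *acc_min = std::min(*acc_min, in_min);
      *acc_max = std::max(*acc_max, in_max);
    }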
| 2830 | 2761 |
| 2831 | 2762 |
| 2832 void ConstantInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2763 void ConstantInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 2833 if (value_.IsSmi()) { | 2764 if (value_.IsSmi()) { |
| 2834 int64_t value = Smi::Cast(value_).Value(); | 2765 int64_t value = Smi::Cast(value_).Value(); |
| 2835 *range = Range(RangeBoundary::FromConstant(value), | 2766 *range = Range(RangeBoundary::FromConstant(value), |
| 2836 RangeBoundary::FromConstant(value)); | 2767 RangeBoundary::FromConstant(value)); |
| 2837 } else if (value_.IsMint()) { | 2768 } else if (value_.IsMint()) { |
| 2838 int64_t value = Mint::Cast(value_).value(); | 2769 int64_t value = Mint::Cast(value_).value(); |
| (...skipping 43 matching lines...) |
| 2882 *range = Range(RangeBoundary::FromConstant(0), | 2813 *range = Range(RangeBoundary::FromConstant(0), |
| 2883 RangeBoundary::FromConstant(String::kMaxElements)); | 2814 RangeBoundary::FromConstant(String::kMaxElements)); |
| 2884 break; | 2815 break; |
| 2885 | 2816 |
| 2886 default: | 2817 default: |
| 2887 Definition::InferRange(analysis, range); | 2818 Definition::InferRange(analysis, range); |
| 2888 } | 2819 } |
| 2889 } | 2820 } |
| 2890 | 2821 |
| 2891 | 2822 |
| 2892 | |
| 2893 void LoadIndexedInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2823 void LoadIndexedInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 2894 switch (class_id()) { | 2824 switch (class_id()) { |
| 2895 case kTypedDataInt8ArrayCid: | 2825 case kTypedDataInt8ArrayCid: |
| 2896 *range = Range(RangeBoundary::FromConstant(-128), | 2826 *range = Range(RangeBoundary::FromConstant(-128), |
| 2897 RangeBoundary::FromConstant(127)); | 2827 RangeBoundary::FromConstant(127)); |
| 2898 break; | 2828 break; |
| 2899 case kTypedDataUint8ArrayCid: | 2829 case kTypedDataUint8ArrayCid: |
| 2900 case kTypedDataUint8ClampedArrayCid: | 2830 case kTypedDataUint8ClampedArrayCid: |
| 2901 case kExternalTypedDataUint8ArrayCid: | 2831 case kExternalTypedDataUint8ArrayCid: |
| 2902 case kExternalTypedDataUint8ClampedArrayCid: | 2832 case kExternalTypedDataUint8ClampedArrayCid: |
| (...skipping 44 matching lines...) |
| 2947 default: | 2877 default: |
| 2948 UNREACHABLE(); | 2878 UNREACHABLE(); |
| 2949 break; | 2879 break; |
| 2950 } | 2880 } |
| 2951 } | 2881 } |
| 2952 | 2882 |
| 2953 | 2883 |
| 2954 void IfThenElseInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2884 void IfThenElseInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 2955 const intptr_t min = Utils::Minimum(if_true_, if_false_); | 2885 const intptr_t min = Utils::Minimum(if_true_, if_false_); |
| 2956 const intptr_t max = Utils::Maximum(if_true_, if_false_); | 2886 const intptr_t max = Utils::Maximum(if_true_, if_false_); |
| 2957 *range = Range(RangeBoundary::FromConstant(min), | 2887 *range = |
| 2958 RangeBoundary::FromConstant(max)); | 2888 Range(RangeBoundary::FromConstant(min), RangeBoundary::FromConstant(max)); |
| 2959 } | 2889 } |
| 2960 | 2890 |
| 2961 | 2891 |
| 2962 static RangeBoundary::RangeSize RepresentationToRangeSize(Representation r) { | 2892 static RangeBoundary::RangeSize RepresentationToRangeSize(Representation r) { |
| 2963 switch (r) { | 2893 switch (r) { |
| 2964 case kTagged: | 2894 case kTagged: |
| 2965 return RangeBoundary::kRangeBoundarySmi; | 2895 return RangeBoundary::kRangeBoundarySmi; |
| 2966 case kUnboxedInt32: | 2896 case kUnboxedInt32: |
| 2967 return RangeBoundary::kRangeBoundaryInt32; | 2897 return RangeBoundary::kRangeBoundaryInt32; |
| 2968 case kUnboxedMint: | 2898 case kUnboxedMint: |
| 2969 return RangeBoundary::kRangeBoundaryInt64; | 2899 return RangeBoundary::kRangeBoundaryInt64; |
| 2970 default: | 2900 default: |
| 2971 UNREACHABLE(); | 2901 UNREACHABLE(); |
| 2972 return RangeBoundary::kRangeBoundarySmi; | 2902 return RangeBoundary::kRangeBoundarySmi; |
| 2973 } | 2903 } |
| 2974 } | 2904 } |
| 2975 | 2905 |
| 2976 | 2906 |
| 2977 void BinaryIntegerOpInstr::InferRangeHelper(const Range* left_range, | 2907 void BinaryIntegerOpInstr::InferRangeHelper(const Range* left_range, |
| 2978 const Range* right_range, | 2908 const Range* right_range, |
| 2979 Range* range) { | 2909 Range* range) { |
| 2980 // TODO(vegorov): canonicalize BinaryIntegerOp to always have constant on the | 2910 // TODO(vegorov): canonicalize BinaryIntegerOp to always have constant on the |
| 2981 // right and a non-constant on the left. | 2911 // right and a non-constant on the left. |
| 2982 if (Range::IsUnknown(left_range) || Range::IsUnknown(right_range)) { | 2912 if (Range::IsUnknown(left_range) || Range::IsUnknown(right_range)) { |
| 2983 return; | 2913 return; |
| 2984 } | 2914 } |
| 2985 | 2915 |
| 2986 Range::BinaryOp(op_kind(), | 2916 Range::BinaryOp(op_kind(), left_range, right_range, left()->definition(), |
| 2987 left_range, | |
| 2988 right_range, | |
| 2989 left()->definition(), | |
| 2990 range); | 2917 range); |
| 2991 ASSERT(!Range::IsUnknown(range)); | 2918 ASSERT(!Range::IsUnknown(range)); |
| 2992 | 2919 |
| 2993 const RangeBoundary::RangeSize range_size = | 2920 const RangeBoundary::RangeSize range_size = |
| 2994 RepresentationToRangeSize(representation()); | 2921 RepresentationToRangeSize(representation()); |
| 2995 | 2922 |
| 2996 // Calculate overflowed status before clamping if operation is | 2923 // Calculate overflowed status before clamping if operation is |
| 2997 // not truncating. | 2924 // not truncating. |
| 2998 if (!is_truncating()) { | 2925 if (!is_truncating()) { |
| 2999 set_can_overflow(!range->Fits(range_size)); | 2926 set_can_overflow(!range->Fits(range_size)); |
| 3000 } | 2927 } |
| 3001 | 2928 |
| 3002 range->Clamp(range_size); | 2929 range->Clamp(range_size); |
| 3003 } | 2930 } |
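
Note on InferRangeHelper above: the ordering matters. Whether the operation can overflow is decided on the unclamped result range, and only afterwards is the range clamped to the representation that later passes will see. A minimal sketch of that two-step pattern for a 32-bit representation, with hypothetical names:

    #include <algorithm>
    #include <cstdint>

    // Hypothetical sketch (not VM code).
    struct SimpleRange {
      int64_t min;
      int64_t max;
    };

    // Step 1: the raw result range decides whether an overflow check is still
    // needed for a non-truncating op. Step 2: clamp to the representation.
    bool NeedsOverflowCheck(SimpleRange* raw) {
      const bool can_overflow = raw->min < INT32_MIN || raw->max > INT32_MAX;
      raw->min = std::max<int64_t>(raw->min, INT32_MIN);
      raw->max = std::min<int64_t>(raw->max, INT32_MAX);
      return can_overflow;
    }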
| 3004 | 2931 |
| 3005 | 2932 |
| 3006 void BinarySmiOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2933 void BinarySmiOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 3007 // TODO(vegorov) completely remove this once GetSmiRange is eliminated. | 2934 // TODO(vegorov) completely remove this once GetSmiRange is eliminated. |
| 3008 InferRangeHelper(analysis->GetSmiRange(left()), | 2935 InferRangeHelper(analysis->GetSmiRange(left()), |
| 3009 analysis->GetSmiRange(right()), | 2936 analysis->GetSmiRange(right()), range); |
| 3010 range); | |
| 3011 } | 2937 } |
| 3012 | 2938 |
| 3013 | 2939 |
| 3014 void BinaryInt32OpInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2940 void BinaryInt32OpInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 3015 InferRangeHelper(analysis->GetSmiRange(left()), | 2941 InferRangeHelper(analysis->GetSmiRange(left()), |
| 3016 analysis->GetSmiRange(right()), | 2942 analysis->GetSmiRange(right()), range); |
| 3017 range); | |
| 3018 } | 2943 } |
| 3019 | 2944 |
| 3020 | 2945 |
| 3021 void BinaryMintOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2946 void BinaryMintOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 3022 InferRangeHelper(left()->definition()->range(), | 2947 InferRangeHelper(left()->definition()->range(), |
| 3023 right()->definition()->range(), | 2948 right()->definition()->range(), range); |
| 3024 range); | |
| 3025 } | 2949 } |
| 3026 | 2950 |
| 3027 | 2951 |
| 3028 void ShiftMintOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2952 void ShiftMintOpInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 3029 InferRangeHelper(left()->definition()->range(), | 2953 InferRangeHelper(left()->definition()->range(), |
| 3030 right()->definition()->range(), | 2954 right()->definition()->range(), range); |
| 3031 range); | |
| 3032 } | 2955 } |
| 3033 | 2956 |
| 3034 | 2957 |
| 3035 void BoxIntegerInstr::InferRange(RangeAnalysis* analysis, Range* range) { | 2958 void BoxIntegerInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
| 3036 const Range* value_range = value()->definition()->range(); | 2959 const Range* value_range = value()->definition()->range(); |
| 3037 if (!Range::IsUnknown(value_range)) { | 2960 if (!Range::IsUnknown(value_range)) { |
| 3038 *range = *value_range; | 2961 *range = *value_range; |
| 3039 } | 2962 } |
| 3040 } | 2963 } |
| 3041 | 2964 |
| (...skipping 47 matching lines...) |
| 3089 *range = *value_range; | 3012 *range = *value_range; |
| 3090 } else if (!value()->definition()->IsMintDefinition() && | 3013 } else if (!value()->definition()->IsMintDefinition() && |
| 3091 (value()->definition()->Type()->ToCid() != kSmiCid)) { | 3014 (value()->definition()->Type()->ToCid() != kSmiCid)) { |
| 3092 *range = Range::Full(RangeBoundary::kRangeBoundaryInt64); | 3015 *range = Range::Full(RangeBoundary::kRangeBoundaryInt64); |
| 3093 } | 3016 } |
| 3094 } | 3017 } |
| 3095 | 3018 |
| 3096 | 3019 |
| 3097 void UnboxedIntConverterInstr::InferRange(RangeAnalysis* analysis, | 3020 void UnboxedIntConverterInstr::InferRange(RangeAnalysis* analysis, |
| 3098 Range* range) { | 3021 Range* range) { |
| 3099 ASSERT((from() == kUnboxedInt32) || | 3022 ASSERT((from() == kUnboxedInt32) || (from() == kUnboxedMint) || |
| 3100 (from() == kUnboxedMint) || | |
| 3101 (from() == kUnboxedUint32)); | 3023 (from() == kUnboxedUint32)); |
| 3102 ASSERT((to() == kUnboxedInt32) || | 3024 ASSERT((to() == kUnboxedInt32) || (to() == kUnboxedMint) || |
| 3103 (to() == kUnboxedMint) || | |
| 3104 (to() == kUnboxedUint32)); | 3025 (to() == kUnboxedUint32)); |
| 3105 const Range* value_range = value()->definition()->range(); | 3026 const Range* value_range = value()->definition()->range(); |
| 3106 if (Range::IsUnknown(value_range)) { | 3027 if (Range::IsUnknown(value_range)) { |
| 3107 return; | 3028 return; |
| 3108 } | 3029 } |
| 3109 | 3030 |
| 3110 if (to() == kUnboxedUint32) { | 3031 if (to() == kUnboxedUint32) { |
| 3111 // TODO(vegorov): improve range information for unboxing to Uint32. | 3032 // TODO(vegorov): improve range information for unboxing to Uint32. |
| 3112 *range = Range( | 3033 *range = |
| 3113 RangeBoundary::FromConstant(0), | 3034 Range(RangeBoundary::FromConstant(0), |
| 3114 RangeBoundary::FromConstant(static_cast<int64_t>(kMaxUint32))); | 3035 RangeBoundary::FromConstant(static_cast<int64_t>(kMaxUint32))); |
| 3115 } else { | 3036 } else { |
| 3116 *range = *value_range; | 3037 *range = *value_range; |
| 3117 if (to() == kUnboxedInt32) { | 3038 if (to() == kUnboxedInt32) { |
| 3118 range->Clamp(RangeBoundary::kRangeBoundaryInt32); | 3039 range->Clamp(RangeBoundary::kRangeBoundaryInt32); |
| 3119 } | 3040 } |
| 3120 } | 3041 } |
| 3121 } | 3042 } |
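
Note on UnboxedIntConverterInstr::InferRange above: a conversion to uint32 currently only yields the generic [0, 2^32 - 1] range (per the TODO), while conversions to int32 or mint carry over the incoming range, clamped to the int32 limits when narrowing. A standalone sketch of that decision, with hypothetical names:

    #include <algorithm>
    #include <cstdint>

    // Hypothetical helper (not VM code): range of an integer width conversion
    // given the source range [in_min, in_max].
    void ConvertedRange(bool to_uint32, bool to_int32,
                        int64_t in_min, int64_t in_max,
                        int64_t* out_min, int64_t* out_max) {
      if (to_uint32) {
        // Only the generic unsigned 32-bit range is claimed.
        *out_min = 0;
        *out_max = 0xFFFFFFFFLL;
      } else {
        *out_min = in_min;
        *out_max = in_max;
        if (to_int32) {
          // Narrowing to int32 clamps the carried-over range.
          *out_min = std::max<int64_t>(*out_min, INT32_MIN);
          *out_max = std::min<int64_t>(*out_max, INT32_MAX);
        }
      }
    }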
| 3122 | 3043 |
| 3123 | 3044 |
| 3124 bool CheckArrayBoundInstr::IsRedundant(const RangeBoundary& length) { | 3045 bool CheckArrayBoundInstr::IsRedundant(const RangeBoundary& length) { |
| (...skipping 44 matching lines...) |
| 3169 } | 3090 } |
| 3170 } while (CanonicalizeMaxBoundary(&max) || | 3091 } while (CanonicalizeMaxBoundary(&max) || |
| 3171 CanonicalizeMinBoundary(&canonical_length)); | 3092 CanonicalizeMinBoundary(&canonical_length)); |
| 3172 | 3093 |
| 3173 // Failed to prove that maximum is bounded with array length. | 3094 // Failed to prove that maximum is bounded with array length. |
| 3174 return false; | 3095 return false; |
| 3175 } | 3096 } |
| 3176 | 3097 |
| 3177 | 3098 |
| 3178 } // namespace dart | 3099 } // namespace dart |