Index: runtime/vm/flow_graph_range_analysis.cc |
diff --git a/runtime/vm/flow_graph_range_analysis.cc b/runtime/vm/flow_graph_range_analysis.cc |
index 18487cc1281ab6d58b98657ca775564cef059c68..cf7c104fbc10257a022ca583b2b14cf63a8af15e 100644 |
--- a/runtime/vm/flow_graph_range_analysis.cc |
+++ b/runtime/vm/flow_graph_range_analysis.cc |
@@ -2825,6 +2825,22 @@ void LoadIndexedInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
} |
+void LoadCodeUnitsInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
+ ASSERT(class_id() == kOneByteStringCid || |
+ class_id() == kTwoByteStringCid); |
+ switch (class_id()) { |
+ case kOneByteStringCid: |
+ case kTwoByteStringCid: |
+ *range = Range(RangeBoundary::FromConstant(0), |
+ RangeBoundary::FromConstant(kMaxUint32)); |
Ivan Posva
2014/11/10 08:16:23
Would we ever be loading 8 bytes at a time on 64-bit?
zerny-google
2014/11/10 09:10:42
No. The current LoadCodeUnitsInstr will pack at most 4 bytes (hence the kMaxUint32 upper bound), even on 64-bit.
|
+ break; |
+ default: |
+ UNREACHABLE(); |
+ break; |
+ } |
+} |
+ |
+ |
void IfThenElseInstr::InferRange(RangeAnalysis* analysis, Range* range) { |
const intptr_t min = Utils::Minimum(if_true_, if_false_); |
const intptr_t max = Utils::Maximum(if_true_, if_false_); |