| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/flow_graph_inliner.h" | 5 #include "vm/flow_graph_inliner.h" |
| 6 | 6 |
| 7 #include "vm/aot_optimizer.h" | 7 #include "vm/aot_optimizer.h" |
| 8 #include "vm/block_scheduler.h" | 8 #include "vm/block_scheduler.h" |
| 9 #include "vm/branch_optimizer.h" | 9 #include "vm/branch_optimizer.h" |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| 11 #include "vm/kernel.h" | 11 #include "vm/kernel.h" |
| 12 #include "vm/kernel_to_il.h" | 12 #include "vm/kernel_to_il.h" |
| 13 #include "vm/flags.h" | 13 #include "vm/flags.h" |
| 14 #include "vm/flow_graph.h" | 14 #include "vm/flow_graph.h" |
| 15 #include "vm/flow_graph_builder.h" | 15 #include "vm/flow_graph_builder.h" |
| 16 #include "vm/flow_graph_compiler.h" | 16 #include "vm/flow_graph_compiler.h" |
| 17 #include "vm/flow_graph_type_propagator.h" | 17 #include "vm/flow_graph_type_propagator.h" |
| 18 #include "vm/il_printer.h" | 18 #include "vm/il_printer.h" |
| 19 #include "vm/jit_optimizer.h" | 19 #include "vm/jit_optimizer.h" |
| 20 #include "vm/longjump.h" | 20 #include "vm/longjump.h" |
| 21 #include "vm/object.h" | 21 #include "vm/object.h" |
| 22 #include "vm/object_store.h" | 22 #include "vm/object_store.h" |
| 23 #include "vm/timer.h" | 23 #include "vm/timer.h" |
| 24 | 24 |
| 25 namespace dart { | 25 namespace dart { |
| 26 | 26 |
| 27 DEFINE_FLAG(int, deoptimization_counter_inlining_threshold, 12, | 27 DEFINE_FLAG(int, |
| 28 "How many times we allow deoptimization before we stop inlining."); | 28 deoptimization_counter_inlining_threshold, |
| 29 12, |
| 30 "How many times we allow deoptimization before we stop inlining."); |
| 29 DEFINE_FLAG(bool, trace_inlining, false, "Trace inlining"); | 31 DEFINE_FLAG(bool, trace_inlining, false, "Trace inlining"); |
| 30 DEFINE_FLAG(charp, inlining_filter, NULL, "Inline only in named function"); | 32 DEFINE_FLAG(charp, inlining_filter, NULL, "Inline only in named function"); |
| 31 | 33 |
| 32 // Flags for inlining heuristics. | 34 // Flags for inlining heuristics. |
| 33 DEFINE_FLAG(int, inline_getters_setters_smaller_than, 10, | 35 DEFINE_FLAG(int, |
| 34 "Always inline getters and setters that have fewer instructions"); | 36 inline_getters_setters_smaller_than, |
| 35 DEFINE_FLAG(int, inlining_depth_threshold, 6, | 37 10, |
| 36 "Inline function calls up to threshold nesting depth"); | 38 "Always inline getters and setters that have fewer instructions"); |
| 37 DEFINE_FLAG(int, inlining_size_threshold, 25, | 39 DEFINE_FLAG(int, |
| 40 inlining_depth_threshold, |
| 41 6, |
| 42 "Inline function calls up to threshold nesting depth"); |
| 43 DEFINE_FLAG( |
| 44 int, |
| 45 inlining_size_threshold, |
| 46 25, |
| 38 "Always inline functions that have threshold or fewer instructions"); | 47 "Always inline functions that have threshold or fewer instructions"); |
| 39 DEFINE_FLAG(int, inlining_callee_call_sites_threshold, 1, | 48 DEFINE_FLAG(int, |
| 40 "Always inline functions containing threshold or fewer calls."); | 49 inlining_callee_call_sites_threshold, |
| 41 DEFINE_FLAG(int, inlining_callee_size_threshold, 80, | 50 1, |
| 42 "Do not inline callees larger than threshold"); | 51 "Always inline functions containing threshold or fewer calls."); |
| 43 DEFINE_FLAG(int, inlining_caller_size_threshold, 50000, | 52 DEFINE_FLAG(int, |
| 44 "Stop inlining once caller reaches the threshold."); | 53 inlining_callee_size_threshold, |
| 45 DEFINE_FLAG(int, inlining_constant_arguments_count, 1, | 54 80, |
| 46 "Inline function calls with sufficient constant arguments " | 55 "Do not inline callees larger than threshold"); |
| 47 "and up to the increased threshold on instructions"); | 56 DEFINE_FLAG(int, |
| 48 DEFINE_FLAG(int, inlining_constant_arguments_max_size_threshold, 200, | 57 inlining_caller_size_threshold, |
| 58 50000, |
| 59 "Stop inlining once caller reaches the threshold."); |
| 60 DEFINE_FLAG(int, |
| 61 inlining_constant_arguments_count, |
| 62 1, |
| 63 "Inline function calls with sufficient constant arguments " |
| 64 "and up to the increased threshold on instructions"); |
| 65 DEFINE_FLAG( |
| 66 int, |
| 67 inlining_constant_arguments_max_size_threshold, |
| 68 200, |
| 49 "Do not inline callees larger than threshold if constant arguments"); | 69 "Do not inline callees larger than threshold if constant arguments"); |
| 50 DEFINE_FLAG(int, inlining_constant_arguments_min_size_threshold, 60, | 70 DEFINE_FLAG(int, |
| 51 "Inline function calls with sufficient constant arguments " | 71 inlining_constant_arguments_min_size_threshold, |
| 52 "and up to the increased threshold on instructions"); | 72 60, |
| 53 DEFINE_FLAG(int, inlining_hotness, 10, | 73 "Inline function calls with sufficient constant arguments " |
| 54 "Inline only hotter calls, in percents (0 .. 100); " | 74 "and up to the increased threshold on instructions"); |
| 55 "default 10%: calls above-equal 10% of max-count are inlined."); | 75 DEFINE_FLAG(int, |
| 56 DEFINE_FLAG(int, inlining_recursion_depth_threshold, 1, | 76 inlining_hotness, |
| 57 "Inline recursive function calls up to threshold recursion depth."); | 77 10, |
| 58 DEFINE_FLAG(int, max_inlined_per_depth, 500, | 78 "Inline only hotter calls, in percents (0 .. 100); " |
| 59 "Max. number of inlined calls per depth"); | 79 "default 10%: calls above-equal 10% of max-count are inlined."); |
| 80 DEFINE_FLAG(int, |
| 81 inlining_recursion_depth_threshold, |
| 82 1, |
| 83 "Inline recursive function calls up to threshold recursion depth."); |
| 84 DEFINE_FLAG(int, |
| 85 max_inlined_per_depth, |
| 86 500, |
| 87 "Max. number of inlined calls per depth"); |
| 60 DEFINE_FLAG(bool, print_inlining_tree, false, "Print inlining tree"); | 88 DEFINE_FLAG(bool, print_inlining_tree, false, "Print inlining tree"); |
| 61 DEFINE_FLAG(bool, enable_inlining_annotations, false, | 89 DEFINE_FLAG(bool, |
| 90 enable_inlining_annotations, |
| 91 false, |
| 62 "Enable inlining annotations"); | 92 "Enable inlining annotations"); |
| 63 | 93 |
| 64 DECLARE_FLAG(bool, compiler_stats); | 94 DECLARE_FLAG(bool, compiler_stats); |
| 65 DECLARE_FLAG(int, max_deoptimization_counter_threshold); | 95 DECLARE_FLAG(int, max_deoptimization_counter_threshold); |
| 66 DECLARE_FLAG(bool, print_flow_graph); | 96 DECLARE_FLAG(bool, print_flow_graph); |
| 67 DECLARE_FLAG(bool, print_flow_graph_optimized); | 97 DECLARE_FLAG(bool, print_flow_graph_optimized); |
| 68 DECLARE_FLAG(bool, support_externalizable_strings); | 98 DECLARE_FLAG(bool, support_externalizable_strings); |
| 69 DECLARE_FLAG(bool, verify_compiler); | 99 DECLARE_FLAG(bool, verify_compiler); |
| 70 | 100 |
| 71 // Quick access to the current zone. | 101 // Quick access to the current zone. |
| 72 #define Z (zone()) | 102 #define Z (zone()) |
| 73 #define I (isolate()) | 103 #define I (isolate()) |
| 74 | 104 |
| 75 #define TRACE_INLINING(statement) \ | 105 #define TRACE_INLINING(statement) \ |
| 76 do { \ | 106 do { \ |
| 77 if (trace_inlining()) statement; \ | 107 if (trace_inlining()) statement; \ |
| 78 } while (false) | 108 } while (false) |
| 79 | 109 |
| 80 #define PRINT_INLINING_TREE(comment, caller, target, instance_call) \ | 110 #define PRINT_INLINING_TREE(comment, caller, target, instance_call) \ |
| 81 do { \ | 111 do { \ |
| 82 if (FLAG_print_inlining_tree) { \ | 112 if (FLAG_print_inlining_tree) { \ |
| 83 inlined_info_.Add(InlinedInfo( \ | 113 inlined_info_.Add(InlinedInfo(caller, target, inlining_depth_, \ |
| 84 caller, target, inlining_depth_, instance_call, comment)); \ | 114 instance_call, comment)); \ |
| 85 } \ | 115 } \ |
| 86 } while (false) \ | 116 } while (false) |
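Both macros use the standard `do { ... } while (false)` wrapper (this hunk also drops the stray trailing backslash after PRINT_INLINING_TREE's `while (false)`), so an invocation behaves like a single statement. A minimal standalone example of why the wrapper matters, using a made-up LOG_TWICE macro:

```cpp
#include <cstdio>

// Without the do/while(false) wrapper, the two statements would break an
// if/else that omits braces; with it, LOG_TWICE(x); is one statement and the
// else binds correctly.
#define LOG_TWICE(msg)  \
  do {                  \
    std::puts(msg);     \
    std::puts(msg);     \
  } while (false)

int main(int argc, char**) {
  if (argc > 1)
    LOG_TWICE("has args");
  else
    LOG_TWICE("no args");
  return 0;
}
```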
| 87 | 117 |
| 88 | 118 |
| 89 // Test if a call is recursive by looking in the deoptimization environment. | 119 // Test if a call is recursive by looking in the deoptimization environment. |
| 90 static bool IsCallRecursive(const Function& function, Definition* call) { | 120 static bool IsCallRecursive(const Function& function, Definition* call) { |
| 91 Environment* env = call->env(); | 121 Environment* env = call->env(); |
| 92 while (env != NULL) { | 122 while (env != NULL) { |
| 93 if (function.raw() == env->function().raw()) { | 123 if (function.raw() == env->function().raw()) { |
| 94 return true; | 124 return true; |
| 95 } | 125 } |
| 96 env = env->outer(); | 126 env = env->outer(); |
| 97 } | 127 } |
| 98 return false; | 128 return false; |
| 99 } | 129 } |
| 100 | 130 |
| 101 | 131 |
| 102 // Helper to get the default value of a formal parameter. | 132 // Helper to get the default value of a formal parameter. |
| 103 static ConstantInstr* GetDefaultValue(intptr_t i, | 133 static ConstantInstr* GetDefaultValue(intptr_t i, |
| 104 const ParsedFunction& parsed_function) { | 134 const ParsedFunction& parsed_function) { |
| 105 return new ConstantInstr(parsed_function.DefaultParameterValueAt(i)); | 135 return new ConstantInstr(parsed_function.DefaultParameterValueAt(i)); |
| 106 } | 136 } |
| 107 | 137 |
| 108 | 138 |
| 109 // Pair of an argument name and its value. | 139 // Pair of an argument name and its value. |
| 110 struct NamedArgument { | 140 struct NamedArgument { |
| 111 String* name; | 141 String* name; |
| 112 Value* value; | 142 Value* value; |
| 113 NamedArgument(String* name, Value* value) | 143 NamedArgument(String* name, Value* value) : name(name), value(value) {} |
| 114 : name(name), value(value) { } | |
| 115 }; | 144 }; |
| 116 | 145 |
| 117 | 146 |
| 118 // Helper to collect information about a callee graph when considering it for | 147 // Helper to collect information about a callee graph when considering it for |
| 119 // inlining. | 148 // inlining. |
| 120 class GraphInfoCollector : public ValueObject { | 149 class GraphInfoCollector : public ValueObject { |
| 121 public: | 150 public: |
| 122 GraphInfoCollector() | 151 GraphInfoCollector() : call_site_count_(0), instruction_count_(0) {} |
| 123 : call_site_count_(0), | |
| 124 instruction_count_(0) { } | |
| 125 | 152 |
| 126 void Collect(const FlowGraph& graph) { | 153 void Collect(const FlowGraph& graph) { |
| 127 call_site_count_ = 0; | 154 call_site_count_ = 0; |
| 128 instruction_count_ = 0; | 155 instruction_count_ = 0; |
| 129 for (BlockIterator block_it = graph.postorder_iterator(); | 156 for (BlockIterator block_it = graph.postorder_iterator(); !block_it.Done(); |
| 130 !block_it.Done(); | |
| 131 block_it.Advance()) { | 157 block_it.Advance()) { |
| 132 for (ForwardInstructionIterator it(block_it.Current()); | 158 for (ForwardInstructionIterator it(block_it.Current()); !it.Done(); |
| 133 !it.Done(); | |
| 134 it.Advance()) { | 159 it.Advance()) { |
| 135 ++instruction_count_; | 160 ++instruction_count_; |
| 136 Instruction* current = it.Current(); | 161 Instruction* current = it.Current(); |
| 137 if (current->IsStaticCall() || | 162 if (current->IsStaticCall() || current->IsClosureCall()) { |
| 138 current->IsClosureCall()) { | |
| 139 ++call_site_count_; | 163 ++call_site_count_; |
| 140 continue; | 164 continue; |
| 141 } | 165 } |
| 142 if (current->IsPolymorphicInstanceCall()) { | 166 if (current->IsPolymorphicInstanceCall()) { |
| 143 PolymorphicInstanceCallInstr* call = | 167 PolymorphicInstanceCallInstr* call = |
| 144 current->AsPolymorphicInstanceCall(); | 168 current->AsPolymorphicInstanceCall(); |
| 145 // These checks make sure that the number of call-sites counted does | 169 // These checks make sure that the number of call-sites counted does |
| 146 // not change relative to the time when the current set of inlining | 170 // not change relative to the time when the current set of inlining |
| 147 // parameters was fixed. | 171 // parameters was fixed. |
| 148 // TODO(fschneider): Determine new heuristic parameters that avoid | 172 // TODO(fschneider): Determine new heuristic parameters that avoid |
| (...skipping 33 matching lines...) |
| 182 inlined_depth(depth), | 206 inlined_depth(depth), |
| 183 call_instr(call), | 207 call_instr(call), |
| 184 bailout_reason(reason) {} | 208 bailout_reason(reason) {} |
| 185 }; | 209 }; |
| 186 | 210 |
| 187 | 211 |
| 188 // A collection of call sites to consider for inlining. | 212 // A collection of call sites to consider for inlining. |
| 189 class CallSites : public ValueObject { | 213 class CallSites : public ValueObject { |
| 190 public: | 214 public: |
| 191 explicit CallSites(FlowGraph* flow_graph) | 215 explicit CallSites(FlowGraph* flow_graph) |
| 192 : static_calls_(), | 216 : static_calls_(), closure_calls_(), instance_calls_() {} |
| 193 closure_calls_(), | |
| 194 instance_calls_() { } | |
| 195 | 217 |
| 196 struct InstanceCallInfo { | 218 struct InstanceCallInfo { |
| 197 PolymorphicInstanceCallInstr* call; | 219 PolymorphicInstanceCallInstr* call; |
| 198 double ratio; | 220 double ratio; |
| 199 const FlowGraph* caller_graph; | 221 const FlowGraph* caller_graph; |
| 200 InstanceCallInfo(PolymorphicInstanceCallInstr* call_arg, | 222 InstanceCallInfo(PolymorphicInstanceCallInstr* call_arg, |
| 201 FlowGraph* flow_graph) | 223 FlowGraph* flow_graph) |
| 202 : call(call_arg), | 224 : call(call_arg), ratio(0.0), caller_graph(flow_graph) {} |
| 203 ratio(0.0), | |
| 204 caller_graph(flow_graph) {} | |
| 205 const Function& caller() const { return caller_graph->function(); } | 225 const Function& caller() const { return caller_graph->function(); } |
| 206 }; | 226 }; |
| 207 | 227 |
| 208 struct StaticCallInfo { | 228 struct StaticCallInfo { |
| 209 StaticCallInstr* call; | 229 StaticCallInstr* call; |
| 210 double ratio; | 230 double ratio; |
| 211 FlowGraph* caller_graph; | 231 FlowGraph* caller_graph; |
| 212 StaticCallInfo(StaticCallInstr* value, FlowGraph* flow_graph) | 232 StaticCallInfo(StaticCallInstr* value, FlowGraph* flow_graph) |
| 213 : call(value), | 233 : call(value), ratio(0.0), caller_graph(flow_graph) {} |
| 214 ratio(0.0), | |
| 215 caller_graph(flow_graph) {} | |
| 216 const Function& caller() const { return caller_graph->function(); } | 234 const Function& caller() const { return caller_graph->function(); } |
| 217 }; | 235 }; |
| 218 | 236 |
| 219 struct ClosureCallInfo { | 237 struct ClosureCallInfo { |
| 220 ClosureCallInstr* call; | 238 ClosureCallInstr* call; |
| 221 FlowGraph* caller_graph; | 239 FlowGraph* caller_graph; |
| 222 ClosureCallInfo(ClosureCallInstr* value, FlowGraph* flow_graph) | 240 ClosureCallInfo(ClosureCallInstr* value, FlowGraph* flow_graph) |
| 223 : call(value), | 241 : call(value), caller_graph(flow_graph) {} |
| 224 caller_graph(flow_graph) {} | |
| 225 const Function& caller() const { return caller_graph->function(); } | 242 const Function& caller() const { return caller_graph->function(); } |
| 226 }; | 243 }; |
| 227 | 244 |
| 228 const GrowableArray<InstanceCallInfo>& instance_calls() const { | 245 const GrowableArray<InstanceCallInfo>& instance_calls() const { |
| 229 return instance_calls_; | 246 return instance_calls_; |
| 230 } | 247 } |
| 231 | 248 |
| 232 const GrowableArray<StaticCallInfo>& static_calls() const { | 249 const GrowableArray<StaticCallInfo>& static_calls() const { |
| 233 return static_calls_; | 250 return static_calls_; |
| 234 } | 251 } |
| 235 | 252 |
| 236 const GrowableArray<ClosureCallInfo>& closure_calls() const { | 253 const GrowableArray<ClosureCallInfo>& closure_calls() const { |
| 237 return closure_calls_; | 254 return closure_calls_; |
| 238 } | 255 } |
| 239 | 256 |
| 240 bool HasCalls() const { | 257 bool HasCalls() const { |
| 241 return !(static_calls_.is_empty() && | 258 return !(static_calls_.is_empty() && closure_calls_.is_empty() && |
| 242 closure_calls_.is_empty() && | |
| 243 instance_calls_.is_empty()); | 259 instance_calls_.is_empty()); |
| 244 } | 260 } |
| 245 | 261 |
| 246 intptr_t NumCalls() const { | 262 intptr_t NumCalls() const { |
| 247 return instance_calls_.length() + | 263 return instance_calls_.length() + static_calls_.length() + |
| 248 static_calls_.length() + | |
| 249 closure_calls_.length(); | 264 closure_calls_.length(); |
| 250 } | 265 } |
| 251 | 266 |
| 252 void Clear() { | 267 void Clear() { |
| 253 static_calls_.Clear(); | 268 static_calls_.Clear(); |
| 254 closure_calls_.Clear(); | 269 closure_calls_.Clear(); |
| 255 instance_calls_.Clear(); | 270 instance_calls_.Clear(); |
| 256 } | 271 } |
| 257 | 272 |
| 258 void ComputeCallSiteRatio(intptr_t static_call_start_ix, | 273 void ComputeCallSiteRatio(intptr_t static_call_start_ix, |
| 259 intptr_t instance_call_start_ix) { | 274 intptr_t instance_call_start_ix) { |
| 260 const intptr_t num_static_calls = | 275 const intptr_t num_static_calls = |
| 261 static_calls_.length() - static_call_start_ix; | 276 static_calls_.length() - static_call_start_ix; |
| 262 const intptr_t num_instance_calls = | 277 const intptr_t num_instance_calls = |
| 263 instance_calls_.length() - instance_call_start_ix; | 278 instance_calls_.length() - instance_call_start_ix; |
| 264 | 279 |
| 265 intptr_t max_count = 0; | 280 intptr_t max_count = 0; |
| 266 GrowableArray<intptr_t> instance_call_counts(num_instance_calls); | 281 GrowableArray<intptr_t> instance_call_counts(num_instance_calls); |
| 267 for (intptr_t i = 0; i < num_instance_calls; ++i) { | 282 for (intptr_t i = 0; i < num_instance_calls; ++i) { |
| 268 const intptr_t aggregate_count = | 283 const intptr_t aggregate_count = |
| 269 instance_calls_[i + instance_call_start_ix]. | 284 instance_calls_[i + instance_call_start_ix] |
| 270 call->ic_data().AggregateCount(); | 285 .call->ic_data() |
| 286 .AggregateCount(); |
| 271 instance_call_counts.Add(aggregate_count); | 287 instance_call_counts.Add(aggregate_count); |
| 272 if (aggregate_count > max_count) max_count = aggregate_count; | 288 if (aggregate_count > max_count) max_count = aggregate_count; |
| 273 } | 289 } |
| 274 | 290 |
| 275 GrowableArray<intptr_t> static_call_counts(num_static_calls); | 291 GrowableArray<intptr_t> static_call_counts(num_static_calls); |
| 276 for (intptr_t i = 0; i < num_static_calls; ++i) { | 292 for (intptr_t i = 0; i < num_static_calls; ++i) { |
| 277 intptr_t aggregate_count = 0; | 293 intptr_t aggregate_count = 0; |
| 278 if (static_calls_[i + static_call_start_ix].call->ic_data() == NULL) { | 294 if (static_calls_[i + static_call_start_ix].call->ic_data() == NULL) { |
| 279 aggregate_count = 0; | 295 aggregate_count = 0; |
| 280 } else { | 296 } else { |
| 281 aggregate_count = | 297 aggregate_count = static_calls_[i + static_call_start_ix] |
| 282 static_calls_[i + static_call_start_ix]. | 298 .call->ic_data() |
| 283 call->ic_data()->AggregateCount(); | 299 ->AggregateCount(); |
| 284 } | 300 } |
| 285 static_call_counts.Add(aggregate_count); | 301 static_call_counts.Add(aggregate_count); |
| 286 if (aggregate_count > max_count) max_count = aggregate_count; | 302 if (aggregate_count > max_count) max_count = aggregate_count; |
| 287 } | 303 } |
| 288 | 304 |
| 289 // max_count can be 0 if none of the calls was executed. | 305 // max_count can be 0 if none of the calls was executed. |
| 290 for (intptr_t i = 0; i < num_instance_calls; ++i) { | 306 for (intptr_t i = 0; i < num_instance_calls; ++i) { |
| 291 const double ratio = (max_count == 0) ? | 307 const double ratio = |
| 292 0.0 : static_cast<double>(instance_call_counts[i]) / max_count; | 308 (max_count == 0) |
| 309 ? 0.0 |
| 310 : static_cast<double>(instance_call_counts[i]) / max_count; |
| 293 instance_calls_[i + instance_call_start_ix].ratio = ratio; | 311 instance_calls_[i + instance_call_start_ix].ratio = ratio; |
| 294 } | 312 } |
| 295 for (intptr_t i = 0; i < num_static_calls; ++i) { | 313 for (intptr_t i = 0; i < num_static_calls; ++i) { |
| 296 const double ratio = (max_count == 0) ? | 314 const double ratio = |
| 297 0.0 : static_cast<double>(static_call_counts[i]) / max_count; | 315 (max_count == 0) |
| 316 ? 0.0 |
| 317 : static_cast<double>(static_call_counts[i]) / max_count; |
| 298 static_calls_[i + static_call_start_ix].ratio = ratio; | 318 static_calls_[i + static_call_start_ix].ratio = ratio; |
| 299 } | 319 } |
| 300 } | 320 } |
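ComputeCallSiteRatio normalizes each call's aggregate IC count by the hottest call collected at this depth; per the FLAG_inlining_hotness help text above, calls at or above 10% of the max count are considered for inlining. A small standalone numeric sketch of that normalization (the counts are made up, and the exact comparison site lives elsewhere in this file):

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
  const std::vector<int> counts = {2, 50, 100};  // hypothetical aggregate IC counts
  const int max_count = *std::max_element(counts.begin(), counts.end());
  const double hotness = 10 / 100.0;             // FLAG_inlining_hotness default: 10%
  for (int c : counts) {
    const double ratio = (max_count == 0) ? 0.0 : static_cast<double>(c) / max_count;
    std::printf("count=%3d ratio=%.2f hot-enough=%s\n", c, ratio,
                ratio >= hotness ? "yes" : "no");
  }
  return 0;
}
```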
| 301 | 321 |
| 302 static void RecordAllNotInlinedFunction( | 322 static void RecordAllNotInlinedFunction( |
| 303 FlowGraph* graph, | 323 FlowGraph* graph, |
| 304 intptr_t depth, | 324 intptr_t depth, |
| 305 GrowableArray<InlinedInfo>* inlined_info) { | 325 GrowableArray<InlinedInfo>* inlined_info) { |
| 306 const Function* caller = &graph->function(); | 326 const Function* caller = &graph->function(); |
| 307 Function& target = Function::ZoneHandle(); | 327 Function& target = Function::ZoneHandle(); |
| 308 for (BlockIterator block_it = graph->postorder_iterator(); | 328 for (BlockIterator block_it = graph->postorder_iterator(); !block_it.Done(); |
| 309 !block_it.Done(); | |
| 310 block_it.Advance()) { | 329 block_it.Advance()) { |
| 311 for (ForwardInstructionIterator it(block_it.Current()); | 330 for (ForwardInstructionIterator it(block_it.Current()); !it.Done(); |
| 312 !it.Done(); | |
| 313 it.Advance()) { | 331 it.Advance()) { |
| 314 Instruction* current = it.Current(); | 332 Instruction* current = it.Current(); |
| 315 Definition* call = NULL; | 333 Definition* call = NULL; |
| 316 if (current->IsPolymorphicInstanceCall()) { | 334 if (current->IsPolymorphicInstanceCall()) { |
| 317 PolymorphicInstanceCallInstr* instance_call = | 335 PolymorphicInstanceCallInstr* instance_call = |
| 318 current->AsPolymorphicInstanceCall(); | 336 current->AsPolymorphicInstanceCall(); |
| 319 target = instance_call->ic_data().GetTargetAt(0); | 337 target = instance_call->ic_data().GetTargetAt(0); |
| 320 call = instance_call; | 338 call = instance_call; |
| 321 } else if (current->IsStaticCall()) { | 339 } else if (current->IsStaticCall()) { |
| 322 StaticCallInstr* static_call = current->AsStaticCall(); | 340 StaticCallInstr* static_call = current->AsStaticCall(); |
| 323 target = static_call->function().raw(); | 341 target = static_call->function().raw(); |
| 324 call = static_call; | 342 call = static_call; |
| 325 } else if (current->IsClosureCall()) { | 343 } else if (current->IsClosureCall()) { |
| 326 // TODO(srdjan): Add data for closure calls. | 344 // TODO(srdjan): Add data for closure calls. |
| 327 } | 345 } |
| 328 if (call != NULL) { | 346 if (call != NULL) { |
| 329 inlined_info->Add(InlinedInfo( | 347 inlined_info->Add( |
| 330 caller, &target, depth + 1, call, "Too deep")); | 348 InlinedInfo(caller, &target, depth + 1, call, "Too deep")); |
| 331 } | 349 } |
| 332 } | 350 } |
| 333 } | 351 } |
| 334 } | 352 } |
| 335 | 353 |
| 336 | 354 |
| 337 void FindCallSites(FlowGraph* graph, | 355 void FindCallSites(FlowGraph* graph, |
| 338 intptr_t depth, | 356 intptr_t depth, |
| 339 GrowableArray<InlinedInfo>* inlined_info) { | 357 GrowableArray<InlinedInfo>* inlined_info) { |
| 340 ASSERT(graph != NULL); | 358 ASSERT(graph != NULL); |
| 341 if (depth > FLAG_inlining_depth_threshold) { | 359 if (depth > FLAG_inlining_depth_threshold) { |
| 342 if (FLAG_print_inlining_tree) { | 360 if (FLAG_print_inlining_tree) { |
| 343 RecordAllNotInlinedFunction(graph, depth, inlined_info); | 361 RecordAllNotInlinedFunction(graph, depth, inlined_info); |
| 344 } | 362 } |
| 345 return; | 363 return; |
| 346 } | 364 } |
| 347 | 365 |
| 348 // Recognized methods are not treated as normal calls. They don't have | 366 // Recognized methods are not treated as normal calls. They don't have |
| 349 // calls in themselves, so we keep adding those even when at the threshold. | 367 // calls in themselves, so we keep adding those even when at the threshold. |
| 350 const bool inline_only_recognized_methods = | 368 const bool inline_only_recognized_methods = |
| 351 (depth == FLAG_inlining_depth_threshold); | 369 (depth == FLAG_inlining_depth_threshold); |
| 352 | 370 |
| 353 const intptr_t instance_call_start_ix = instance_calls_.length(); | 371 const intptr_t instance_call_start_ix = instance_calls_.length(); |
| 354 const intptr_t static_call_start_ix = static_calls_.length(); | 372 const intptr_t static_call_start_ix = static_calls_.length(); |
| 355 for (BlockIterator block_it = graph->postorder_iterator(); | 373 for (BlockIterator block_it = graph->postorder_iterator(); !block_it.Done(); |
| 356 !block_it.Done(); | |
| 357 block_it.Advance()) { | 374 block_it.Advance()) { |
| 358 for (ForwardInstructionIterator it(block_it.Current()); | 375 for (ForwardInstructionIterator it(block_it.Current()); !it.Done(); |
| 359 !it.Done(); | |
| 360 it.Advance()) { | 376 it.Advance()) { |
| 361 Instruction* current = it.Current(); | 377 Instruction* current = it.Current(); |
| 362 if (current->IsPolymorphicInstanceCall()) { | 378 if (current->IsPolymorphicInstanceCall()) { |
| 363 PolymorphicInstanceCallInstr* instance_call = | 379 PolymorphicInstanceCallInstr* instance_call = |
| 364 current->AsPolymorphicInstanceCall(); | 380 current->AsPolymorphicInstanceCall(); |
| 365 if (!inline_only_recognized_methods || | 381 if (!inline_only_recognized_methods || |
| 366 instance_call->HasSingleRecognizedTarget() || | 382 instance_call->HasSingleRecognizedTarget() || |
| 367 instance_call->ic_data().HasOnlyDispatcherTargets()) { | 383 instance_call->ic_data().HasOnlyDispatcherTargets()) { |
| 368 instance_calls_.Add(InstanceCallInfo(instance_call, graph)); | 384 instance_calls_.Add(InstanceCallInfo(instance_call, graph)); |
| 369 } else { | 385 } else { |
| 370 // Method not inlined because inlining too deep and method | 386 // Method not inlined because inlining too deep and method |
| 371 // not recognized. | 387 // not recognized. |
| 372 if (FLAG_print_inlining_tree) { | 388 if (FLAG_print_inlining_tree) { |
| 373 const Function* caller = &graph->function(); | 389 const Function* caller = &graph->function(); |
| 374 const Function* target = | 390 const Function* target = &Function::ZoneHandle( |
| 375 &Function::ZoneHandle( | 391 instance_call->ic_data().GetTargetAt(0)); |
| 376 instance_call->ic_data().GetTargetAt(0)); | 392 inlined_info->Add(InlinedInfo(caller, target, depth + 1, |
| 377 inlined_info->Add(InlinedInfo( | 393 instance_call, "Too deep")); |
| 378 caller, target, depth + 1, instance_call, "Too deep")); | |
| 379 } | 394 } |
| 380 } | 395 } |
| 381 } else if (current->IsStaticCall()) { | 396 } else if (current->IsStaticCall()) { |
| 382 StaticCallInstr* static_call = current->AsStaticCall(); | 397 StaticCallInstr* static_call = current->AsStaticCall(); |
| 383 if (!inline_only_recognized_methods || | 398 if (!inline_only_recognized_methods || |
| 384 static_call->function().IsRecognized()) { | 399 static_call->function().IsRecognized()) { |
| 385 static_calls_.Add(StaticCallInfo(static_call, graph)); | 400 static_calls_.Add(StaticCallInfo(static_call, graph)); |
| 386 } else { | 401 } else { |
| 387 // Method not inlined because inlining too deep and method | 402 // Method not inlined because inlining too deep and method |
| 388 // not recognized. | 403 // not recognized. |
| 389 if (FLAG_print_inlining_tree) { | 404 if (FLAG_print_inlining_tree) { |
| 390 const Function* caller = &graph->function(); | 405 const Function* caller = &graph->function(); |
| 391 const Function* target = &static_call->function(); | 406 const Function* target = &static_call->function(); |
| 392 inlined_info->Add(InlinedInfo( | 407 inlined_info->Add(InlinedInfo(caller, target, depth + 1, |
| 393 caller, target, depth + 1, static_call, "Too deep")); | 408 static_call, "Too deep")); |
| 394 } | 409 } |
| 395 } | 410 } |
| 396 } else if (current->IsClosureCall()) { | 411 } else if (current->IsClosureCall()) { |
| 397 if (!inline_only_recognized_methods) { | 412 if (!inline_only_recognized_methods) { |
| 398 ClosureCallInstr* closure_call = current->AsClosureCall(); | 413 ClosureCallInstr* closure_call = current->AsClosureCall(); |
| 399 closure_calls_.Add(ClosureCallInfo(closure_call, graph)); | 414 closure_calls_.Add(ClosureCallInfo(closure_call, graph)); |
| 400 } | 415 } |
| 401 } | 416 } |
| 402 } | 417 } |
| 403 } | 418 } |
| (...skipping 13 matching lines...) |
| 417 InlinedCallData(Definition* call, | 432 InlinedCallData(Definition* call, |
| 418 GrowableArray<Value*>* arguments, | 433 GrowableArray<Value*>* arguments, |
| 419 const Function& caller, | 434 const Function& caller, |
| 420 intptr_t caller_inlining_id) | 435 intptr_t caller_inlining_id) |
| 421 : call(call), | 436 : call(call), |
| 422 arguments(arguments), | 437 arguments(arguments), |
| 423 callee_graph(NULL), | 438 callee_graph(NULL), |
| 424 parameter_stubs(NULL), | 439 parameter_stubs(NULL), |
| 425 exit_collector(NULL), | 440 exit_collector(NULL), |
| 426 caller(caller), | 441 caller(caller), |
| 427 caller_inlining_id_(caller_inlining_id) { } | 442 caller_inlining_id_(caller_inlining_id) {} |
| 428 | 443 |
| 429 Definition* call; | 444 Definition* call; |
| 430 GrowableArray<Value*>* arguments; | 445 GrowableArray<Value*>* arguments; |
| 431 FlowGraph* callee_graph; | 446 FlowGraph* callee_graph; |
| 432 ZoneGrowableArray<Definition*>* parameter_stubs; | 447 ZoneGrowableArray<Definition*>* parameter_stubs; |
| 433 InlineExitCollector* exit_collector; | 448 InlineExitCollector* exit_collector; |
| 434 const Function& caller; | 449 const Function& caller; |
| 435 const intptr_t caller_inlining_id_; | 450 const intptr_t caller_inlining_id_; |
| 436 }; | 451 }; |
| 437 | 452 |
| (...skipping 62 matching lines...) |
| 500 caller_graph_(inliner->flow_graph()), | 515 caller_graph_(inliner->flow_graph()), |
| 501 inlined_(false), | 516 inlined_(false), |
| 502 initial_size_(inliner->flow_graph()->InstructionCount()), | 517 initial_size_(inliner->flow_graph()->InstructionCount()), |
| 503 inlined_size_(0), | 518 inlined_size_(0), |
| 504 inlined_recursive_call_(false), | 519 inlined_recursive_call_(false), |
| 505 inlining_depth_(1), | 520 inlining_depth_(1), |
| 506 inlining_recursion_depth_(0), | 521 inlining_recursion_depth_(0), |
| 507 collected_call_sites_(NULL), | 522 collected_call_sites_(NULL), |
| 508 inlining_call_sites_(NULL), | 523 inlining_call_sites_(NULL), |
| 509 function_cache_(), | 524 function_cache_(), |
| 510 inlined_info_() { } | 525 inlined_info_() {} |
| 511 | 526 |
| 512 FlowGraph* caller_graph() const { return caller_graph_; } | 527 FlowGraph* caller_graph() const { return caller_graph_; } |
| 513 | 528 |
| 514 Thread* thread() const { return caller_graph_->thread(); } | 529 Thread* thread() const { return caller_graph_->thread(); } |
| 515 Isolate* isolate() const { return caller_graph_->isolate(); } | 530 Isolate* isolate() const { return caller_graph_->isolate(); } |
| 516 Zone* zone() const { return caller_graph_->zone(); } | 531 Zone* zone() const { return caller_graph_->zone(); } |
| 517 | 532 |
| 518 bool trace_inlining() const { return inliner_->trace_inlining(); } | 533 bool trace_inlining() const { return inliner_->trace_inlining(); } |
| 519 | 534 |
| 520 // Inlining heuristics based on Cooper et al. 2008. | 535 // Inlining heuristics based on Cooper et al. 2008. |
| (...skipping 36 matching lines...) |
| 557 FLAG_deoptimization_counter_inlining_threshold) { | 572 FLAG_deoptimization_counter_inlining_threshold) { |
| 558 return; | 573 return; |
| 559 } | 574 } |
| 560 // Create two call site collections to swap between. | 575 // Create two call site collections to swap between. |
| 561 CallSites sites1(caller_graph_); | 576 CallSites sites1(caller_graph_); |
| 562 CallSites sites2(caller_graph_); | 577 CallSites sites2(caller_graph_); |
| 563 CallSites* call_sites_temp = NULL; | 578 CallSites* call_sites_temp = NULL; |
| 564 collected_call_sites_ = &sites1; | 579 collected_call_sites_ = &sites1; |
| 565 inlining_call_sites_ = &sites2; | 580 inlining_call_sites_ = &sites2; |
| 566 // Collect initial call sites. | 581 // Collect initial call sites. |
| 567 collected_call_sites_->FindCallSites(caller_graph_, | 582 collected_call_sites_->FindCallSites(caller_graph_, inlining_depth_, |
| 568 inlining_depth_, | |
| 569 &inlined_info_); | 583 &inlined_info_); |
| 570 while (collected_call_sites_->HasCalls()) { | 584 while (collected_call_sites_->HasCalls()) { |
| 571 TRACE_INLINING(THR_Print(" Depth %" Pd " ----------\n", | 585 TRACE_INLINING( |
| 572 inlining_depth_)); | 586 THR_Print(" Depth %" Pd " ----------\n", inlining_depth_)); |
| 573 if (collected_call_sites_->NumCalls() > FLAG_max_inlined_per_depth) { | 587 if (collected_call_sites_->NumCalls() > FLAG_max_inlined_per_depth) { |
| 574 break; | 588 break; |
| 575 } | 589 } |
| 576 if (FLAG_print_inlining_tree) { | 590 if (FLAG_print_inlining_tree) { |
| 577 THR_Print("**Depth % " Pd " calls to inline %" Pd "\n", | 591 THR_Print("**Depth % " Pd " calls to inline %" Pd "\n", inlining_depth_, |
| 578 inlining_depth_, collected_call_sites_->NumCalls()); | 592 collected_call_sites_->NumCalls()); |
| 579 } | 593 } |
| 580 // Swap collected and inlining arrays and clear the new collecting array. | 594 // Swap collected and inlining arrays and clear the new collecting array. |
| 581 call_sites_temp = collected_call_sites_; | 595 call_sites_temp = collected_call_sites_; |
| 582 collected_call_sites_ = inlining_call_sites_; | 596 collected_call_sites_ = inlining_call_sites_; |
| 583 inlining_call_sites_ = call_sites_temp; | 597 inlining_call_sites_ = call_sites_temp; |
| 584 collected_call_sites_->Clear(); | 598 collected_call_sites_->Clear(); |
| 585 // Inline call sites at the current depth. | 599 // Inline call sites at the current depth. |
| 586 InlineInstanceCalls(); | 600 InlineInstanceCalls(); |
| 587 InlineStaticCalls(); | 601 InlineStaticCalls(); |
| 588 InlineClosureCalls(); | 602 InlineClosureCalls(); |
| 589 // Increment the inlining depths. Checked before subsequent inlining. | 603 // Increment the inlining depths. Checked before subsequent inlining. |
| 590 ++inlining_depth_; | 604 ++inlining_depth_; |
| 591 if (inlined_recursive_call_) { | 605 if (inlined_recursive_call_) { |
| 592 ++inlining_recursion_depth_; | 606 ++inlining_recursion_depth_; |
| 593 inlined_recursive_call_ = false; | 607 inlined_recursive_call_ = false; |
| 594 } | 608 } |
| 595 } | 609 } |
| 596 | 610 |
| 597 collected_call_sites_ = NULL; | 611 collected_call_sites_ = NULL; |
| 598 inlining_call_sites_ = NULL; | 612 inlining_call_sites_ = NULL; |
| 599 } | 613 } |
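The driver above keeps two CallSites collections and swaps them each iteration: call sites discovered while inlining at depth d become the work list for depth d+1, until nothing new is collected or a threshold stops the loop. A standalone sketch of that double-buffered work-list pattern (plain ints stand in for call sites; the numbers are invented):

```cpp
#include <cstdio>
#include <utility>
#include <vector>

int main() {
  const int kMaxDepth = 6;              // cf. FLAG_inlining_depth_threshold
  std::vector<int> collected = {3, 1};  // pretend "sizes" of initially found call sites
  std::vector<int> inlining;
  for (int depth = 1; !collected.empty() && depth <= kMaxDepth; ++depth) {
    std::swap(collected, inlining);  // what was collected is now processed...
    collected.clear();               // ...and new discoveries go into the other buffer
    for (int site : inlining) {
      std::printf("depth %d: inlining a call of size %d\n", depth, site);
      if (site > 1) collected.push_back(site - 1);  // inlining exposes smaller callees
    }
  }
  return 0;
}
```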
| 600 | 614 |
| 601 bool inlined() const { return inlined_; } | 615 bool inlined() const { return inlined_; } |
| 602 | 616 |
| 603 double GrowthFactor() const { | 617 double GrowthFactor() const { |
| 604 return static_cast<double>(inlined_size_) / | 618 return static_cast<double>(inlined_size_) / |
| 605 static_cast<double>(initial_size_); | 619 static_cast<double>(initial_size_); |
| 606 } | 620 } |
| 607 | 621 |
| 608 // Helper to create a parameter stub from an actual argument. | 622 // Helper to create a parameter stub from an actual argument. |
| 609 Definition* CreateParameterStub(intptr_t i, | 623 Definition* CreateParameterStub(intptr_t i, |
| 610 Value* argument, | 624 Value* argument, |
| 611 FlowGraph* graph) { | 625 FlowGraph* graph) { |
| 612 ConstantInstr* constant = argument->definition()->AsConstant(); | 626 ConstantInstr* constant = argument->definition()->AsConstant(); |
| 613 if (constant != NULL) { | 627 if (constant != NULL) { |
| 614 return new(Z) ConstantInstr(constant->value()); | 628 return new (Z) ConstantInstr(constant->value()); |
| 615 } else { | 629 } else { |
| 616 ParameterInstr* param = new(Z) ParameterInstr(i, graph->graph_entry()); | 630 ParameterInstr* param = new (Z) ParameterInstr(i, graph->graph_entry()); |
| 617 param->UpdateType(*argument->Type()); | 631 param->UpdateType(*argument->Type()); |
| 618 return param; | 632 return param; |
| 619 } | 633 } |
| 620 } | 634 } |
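CreateParameterStub is what lets constant actual arguments pay off inside the callee: a constant argument becomes a ConstantInstr the callee graph can fold, while everything else becomes a typed ParameterInstr. A loose standalone analogy, not VM code, of why the constant case is cheaper (a template parameter stands in for the constant stub):

```cpp
#include <cstdio>

// Models a ConstantInstr stub: the argument is a compile-time constant, so the
// branch on it is a constant condition the optimizer can fold away.
template <int kFlag>
int callee_with_constant_arg(int x) {
  if (kFlag) return x + 1;
  return x - 1;
}

// Models a ParameterInstr stub: the value is only known at run time, so the
// branch has to stay.
int callee_with_unknown_arg(int flag, int x) {
  return flag ? x + 1 : x - 1;
}

int main() {
  std::printf("%d %d\n", callee_with_constant_arg<1>(41),
              callee_with_unknown_arg(0, 41));  // prints: 42 40
  return 0;
}
```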
| 621 | 635 |
| 622 bool TryInlining(const Function& function, | 636 bool TryInlining(const Function& function, |
| 623 const Array& argument_names, | 637 const Array& argument_names, |
| 624 InlinedCallData* call_data) { | 638 InlinedCallData* call_data) { |
| 625 TRACE_INLINING(THR_Print(" => %s (deopt count %d)\n", | 639 TRACE_INLINING(THR_Print(" => %s (deopt count %d)\n", function.ToCString(), |
| 626 function.ToCString(), | |
| 627 function.deoptimization_counter())); | 640 function.deoptimization_counter())); |
| 628 | 641 |
| 629 // Abort if the inlinable bit on the function is low. | 642 // Abort if the inlinable bit on the function is low. |
| 630 if (!function.CanBeInlined()) { | 643 if (!function.CanBeInlined()) { |
| 631 TRACE_INLINING(THR_Print(" Bailout: not inlinable\n")); | 644 TRACE_INLINING(THR_Print(" Bailout: not inlinable\n")); |
| 632 PRINT_INLINING_TREE("Not inlinable", | 645 PRINT_INLINING_TREE("Not inlinable", &call_data->caller, &function, |
| 633 &call_data->caller, &function, call_data->call); | 646 call_data->call); |
| 634 return false; | 647 return false; |
| 635 } | 648 } |
| 636 | 649 |
| 637 // Don't inline any intrinsified functions in precompiled mode | 650 // Don't inline any intrinsified functions in precompiled mode |
| 638 // to reduce code size and make sure we use the intrinsic code. | 651 // to reduce code size and make sure we use the intrinsic code. |
| 639 if (FLAG_precompiled_mode && | 652 if (FLAG_precompiled_mode && function.is_intrinsic() && |
| 640 function.is_intrinsic() && | |
| 641 !inliner_->AlwaysInline(function)) { | 653 !inliner_->AlwaysInline(function)) { |
| 642 TRACE_INLINING(THR_Print(" Bailout: intrinisic\n")); | 654 TRACE_INLINING(THR_Print(" Bailout: intrinisic\n")); |
| 643 PRINT_INLINING_TREE("intrinsic", | 655 PRINT_INLINING_TREE("intrinsic", &call_data->caller, &function, |
| 644 &call_data->caller, &function, call_data->call); | 656 call_data->call); |
| 645 return false; | 657 return false; |
| 646 } | 658 } |
| 647 | 659 |
| 648 // Do not rely on function type feedback or presence of code to determine | 660 // Do not rely on function type feedback or presence of code to determine |
| 649 // if a function was compiled. | 661 // if a function was compiled. |
| 650 if (!FLAG_precompiled_mode && !function.was_compiled()) { | 662 if (!FLAG_precompiled_mode && !function.was_compiled()) { |
| 651 TRACE_INLINING(THR_Print(" Bailout: not compiled yet\n")); | 663 TRACE_INLINING(THR_Print(" Bailout: not compiled yet\n")); |
| 652 PRINT_INLINING_TREE("Not compiled", | 664 PRINT_INLINING_TREE("Not compiled", &call_data->caller, &function, |
| 653 &call_data->caller, &function, call_data->call); | 665 call_data->call); |
| 654 return false; | 666 return false; |
| 655 } | 667 } |
| 656 | 668 |
| 657 // Type feedback may have been cleared for this function (ClearICDataArray), | 669 // Type feedback may have been cleared for this function (ClearICDataArray), |
| 658 // but we need it for inlining. | 670 // but we need it for inlining. |
| 659 if (!FLAG_precompiled_mode && (function.ic_data_array() == Array::null())) { | 671 if (!FLAG_precompiled_mode && (function.ic_data_array() == Array::null())) { |
| 660 TRACE_INLINING(THR_Print(" Bailout: type feedback cleared\n")); | 672 TRACE_INLINING(THR_Print(" Bailout: type feedback cleared\n")); |
| 661 PRINT_INLINING_TREE("Not compiled", | 673 PRINT_INLINING_TREE("Not compiled", &call_data->caller, &function, |
| 662 &call_data->caller, &function, call_data->call); | 674 call_data->call); |
| 663 return false; | 675 return false; |
| 664 } | 676 } |
| 665 | 677 |
| 666 // Abort if this function has deoptimized too much. | 678 // Abort if this function has deoptimized too much. |
| 667 if (function.deoptimization_counter() >= | 679 if (function.deoptimization_counter() >= |
| 668 FLAG_max_deoptimization_counter_threshold) { | 680 FLAG_max_deoptimization_counter_threshold) { |
| 669 function.set_is_inlinable(false); | 681 function.set_is_inlinable(false); |
| 670 TRACE_INLINING(THR_Print(" Bailout: deoptimization threshold\n")); | 682 TRACE_INLINING(THR_Print(" Bailout: deoptimization threshold\n")); |
| 671 PRINT_INLINING_TREE("Deoptimization threshold exceeded", | 683 PRINT_INLINING_TREE("Deoptimization threshold exceeded", |
| 672 &call_data->caller, &function, call_data->call); | 684 &call_data->caller, &function, call_data->call); |
| 673 return false; | 685 return false; |
| 674 } | 686 } |
| 675 | 687 |
| 676 const char* kNeverInlineAnnotation = "NeverInline"; | 688 const char* kNeverInlineAnnotation = "NeverInline"; |
| 677 if (FLAG_enable_inlining_annotations && | 689 if (FLAG_enable_inlining_annotations && |
| 678 HasAnnotation(function, kNeverInlineAnnotation)) { | 690 HasAnnotation(function, kNeverInlineAnnotation)) { |
| 679 TRACE_INLINING(THR_Print(" Bailout: NeverInline annotation\n")); | 691 TRACE_INLINING(THR_Print(" Bailout: NeverInline annotation\n")); |
| 680 return false; | 692 return false; |
| 681 } | 693 } |
| 682 | 694 |
| 683 GrowableArray<Value*>* arguments = call_data->arguments; | 695 GrowableArray<Value*>* arguments = call_data->arguments; |
| 684 const intptr_t constant_arguments = CountConstants(*arguments); | 696 const intptr_t constant_arguments = CountConstants(*arguments); |
| 685 if (!ShouldWeInline(function, | 697 if (!ShouldWeInline(function, function.optimized_instruction_count(), |
| 686 function.optimized_instruction_count(), | |
| 687 function.optimized_call_site_count(), | 698 function.optimized_call_site_count(), |
| 688 constant_arguments)) { | 699 constant_arguments)) { |
| 689 TRACE_INLINING(THR_Print(" Bailout: early heuristics with " | 700 TRACE_INLINING( |
| 690 "code size: %" Pd ", " | 701 THR_Print(" Bailout: early heuristics with " |
| 691 "call sites: %" Pd ", " | 702 "code size: %" Pd ", " |
| 692 "const args: %" Pd "\n", | 703 "call sites: %" Pd ", " |
| 693 function.optimized_instruction_count(), | 704 "const args: %" Pd "\n", |
| 694 function.optimized_call_site_count(), | 705 function.optimized_instruction_count(), |
| 695 constant_arguments)); | 706 function.optimized_call_site_count(), constant_arguments)); |
| 696 PRINT_INLINING_TREE("Early heuristic", | 707 PRINT_INLINING_TREE("Early heuristic", &call_data->caller, &function, |
| 697 &call_data->caller, &function, call_data->call); | 708 call_data->call); |
| 698 return false; | 709 return false; |
| 699 } | 710 } |
| 700 | 711 |
| 701 // Abort if this is a recursive occurrence. | 712 // Abort if this is a recursive occurrence. |
| 702 Definition* call = call_data->call; | 713 Definition* call = call_data->call; |
| 703 // Added 'volatile' works around a possible GCC 4.9 compiler bug. | 714 // Added 'volatile' works around a possible GCC 4.9 compiler bug. |
| 704 volatile bool is_recursive_call = IsCallRecursive(function, call); | 715 volatile bool is_recursive_call = IsCallRecursive(function, call); |
| 705 if (is_recursive_call && | 716 if (is_recursive_call && |
| 706 inlining_recursion_depth_ >= FLAG_inlining_recursion_depth_threshold) { | 717 inlining_recursion_depth_ >= FLAG_inlining_recursion_depth_threshold) { |
| 707 TRACE_INLINING(THR_Print(" Bailout: recursive function\n")); | 718 TRACE_INLINING(THR_Print(" Bailout: recursive function\n")); |
| 708 PRINT_INLINING_TREE("Recursive function", | 719 PRINT_INLINING_TREE("Recursive function", &call_data->caller, &function, |
| 709 &call_data->caller, &function, call_data->call); | 720 call_data->call); |
| 710 return false; | 721 return false; |
| 711 } | 722 } |
| 712 | 723 |
| 713 // Save and clear deopt id. | 724 // Save and clear deopt id. |
| 714 const intptr_t prev_deopt_id = thread()->deopt_id(); | 725 const intptr_t prev_deopt_id = thread()->deopt_id(); |
| 715 thread()->set_deopt_id(0); | 726 thread()->set_deopt_id(0); |
| 716 Error& error = Error::Handle(); | 727 Error& error = Error::Handle(); |
| 717 { | 728 { |
| 718 // Install bailout jump. | 729 // Install bailout jump. |
| 719 LongJumpScope jump; | 730 LongJumpScope jump; |
| 720 if (setjmp(*jump.Set()) == 0) { | 731 if (setjmp(*jump.Set()) == 0) { |
| 721 Isolate* isolate = Isolate::Current(); | 732 Isolate* isolate = Isolate::Current(); |
| 722 // Makes sure no classes are loaded during parsing in background. | 733 // Makes sure no classes are loaded during parsing in background. |
| 723 const intptr_t loading_invalidation_gen_at_start = | 734 const intptr_t loading_invalidation_gen_at_start = |
| 724 isolate->loading_invalidation_gen(); | 735 isolate->loading_invalidation_gen(); |
| 725 | 736 |
| 726 if (Compiler::IsBackgroundCompilation()) { | 737 if (Compiler::IsBackgroundCompilation()) { |
| 727 if (isolate->IsTopLevelParsing() || | 738 if (isolate->IsTopLevelParsing() || |
| 728 (loading_invalidation_gen_at_start != | 739 (loading_invalidation_gen_at_start != |
| 729 isolate->loading_invalidation_gen())) { | 740 isolate->loading_invalidation_gen())) { |
| 730 // Loading occured while parsing. We need to abort here because | 741 // Loading occured while parsing. We need to abort here because |
| 731 // state changed while compiling. | 742 // state changed while compiling. |
| 732 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, | 743 Compiler::AbortBackgroundCompilation( |
| 733 "Loading occured while parsing in inliner"); | 744 Thread::kNoDeoptId, "Loading occured while parsing in inliner"); |
| 734 } | 745 } |
| 735 } | 746 } |
| 736 | 747 |
| 737 // Load IC data for the callee. | 748 // Load IC data for the callee. |
| 738 ZoneGrowableArray<const ICData*>* ic_data_array = | 749 ZoneGrowableArray<const ICData*>* ic_data_array = |
| 739 new(Z) ZoneGrowableArray<const ICData*>(); | 750 new (Z) ZoneGrowableArray<const ICData*>(); |
| 740 const bool clone_ic_data = Compiler::IsBackgroundCompilation(); | 751 const bool clone_ic_data = Compiler::IsBackgroundCompilation(); |
| 741 function.RestoreICDataMap(ic_data_array, clone_ic_data); | 752 function.RestoreICDataMap(ic_data_array, clone_ic_data); |
| 742 if (Compiler::IsBackgroundCompilation() && | 753 if (Compiler::IsBackgroundCompilation() && |
| 743 (function.ic_data_array() == Array::null())) { | 754 (function.ic_data_array() == Array::null())) { |
| 744 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, | 755 Compiler::AbortBackgroundCompilation(Thread::kNoDeoptId, |
| 745 "ICData cleared while inlining"); | 756 "ICData cleared while inlining"); |
| 746 } | 757 } |
| 747 | 758 |
| 748 // Parse the callee function. | 759 // Parse the callee function. |
| 749 bool in_cache; | 760 bool in_cache; |
| 750 ParsedFunction* parsed_function; | 761 ParsedFunction* parsed_function; |
| 751 { | 762 { |
| 752 CSTAT_TIMER_SCOPE(thread(), graphinliner_parse_timer); | 763 CSTAT_TIMER_SCOPE(thread(), graphinliner_parse_timer); |
| 753 parsed_function = GetParsedFunction(function, &in_cache); | 764 parsed_function = GetParsedFunction(function, &in_cache); |
| 754 } | 765 } |
| 755 | 766 |
| 756 // Build the callee graph. | 767 // Build the callee graph. |
| 757 InlineExitCollector* exit_collector = | 768 InlineExitCollector* exit_collector = |
| 758 new(Z) InlineExitCollector(caller_graph_, call); | 769 new (Z) InlineExitCollector(caller_graph_, call); |
| 759 FlowGraph* callee_graph; | 770 FlowGraph* callee_graph; |
| 760 if (UseKernelFrontEndFor(parsed_function)) { | 771 if (UseKernelFrontEndFor(parsed_function)) { |
| 761 kernel::TreeNode* node = static_cast<kernel::TreeNode*>( | 772 kernel::TreeNode* node = static_cast<kernel::TreeNode*>( |
| 762 parsed_function->function().kernel_function()); | 773 parsed_function->function().kernel_function()); |
| 763 | 774 |
| 764 kernel::FlowGraphBuilder builder(node, | 775 kernel::FlowGraphBuilder builder( |
| 765 parsed_function, | 776 node, parsed_function, *ic_data_array, exit_collector, |
| 766 *ic_data_array, | 777 Compiler::kNoOSRDeoptId, caller_graph_->max_block_id() + 1); |
| 767 exit_collector, | |
| 768 Compiler::kNoOSRDeoptId, | |
| 769 caller_graph_->max_block_id() + 1); | |
| 770 { | 778 { |
| 771 CSTAT_TIMER_SCOPE(thread(), graphinliner_build_timer); | 779 CSTAT_TIMER_SCOPE(thread(), graphinliner_build_timer); |
| 772 callee_graph = builder.BuildGraph(); | 780 callee_graph = builder.BuildGraph(); |
| 773 } | 781 } |
| 774 } else { | 782 } else { |
| 775 FlowGraphBuilder builder(*parsed_function, | 783 FlowGraphBuilder builder(*parsed_function, *ic_data_array, |
| 776 *ic_data_array, | 784 exit_collector, Compiler::kNoOSRDeoptId); |
| 777 exit_collector, | |
| 778 Compiler::kNoOSRDeoptId); | |
| 779 builder.SetInitialBlockId(caller_graph_->max_block_id()); | 785 builder.SetInitialBlockId(caller_graph_->max_block_id()); |
| 780 { | 786 { |
| 781 CSTAT_TIMER_SCOPE(thread(), graphinliner_build_timer); | 787 CSTAT_TIMER_SCOPE(thread(), graphinliner_build_timer); |
| 782 callee_graph = builder.BuildGraph(); | 788 callee_graph = builder.BuildGraph(); |
| 783 } | 789 } |
| 784 } | 790 } |
| 785 | 791 |
| 786 // The parameter stubs are a copy of the actual arguments providing | 792 // The parameter stubs are a copy of the actual arguments providing |
| 787 // concrete information about the values, for example constant values, | 793 // concrete information about the values, for example constant values, |
| 788 // without linking between the caller and callee graphs. | 794 // without linking between the caller and callee graphs. |
| 789 // TODO(zerny): Put more information in the stubs, eg, type information. | 795 // TODO(zerny): Put more information in the stubs, eg, type information. |
| 790 ZoneGrowableArray<Definition*>* param_stubs = | 796 ZoneGrowableArray<Definition*>* param_stubs = |
| 791 new(Z) ZoneGrowableArray<Definition*>( | 797 new (Z) ZoneGrowableArray<Definition*>(function.NumParameters()); |
| 792 function.NumParameters()); | |
| 793 | 798 |
| 794 // Create a parameter stub for each fixed positional parameter. | 799 // Create a parameter stub for each fixed positional parameter. |
| 795 for (intptr_t i = 0; i < function.num_fixed_parameters(); ++i) { | 800 for (intptr_t i = 0; i < function.num_fixed_parameters(); ++i) { |
| 796 param_stubs->Add(CreateParameterStub(i, (*arguments)[i], | 801 param_stubs->Add( |
| 797 callee_graph)); | 802 CreateParameterStub(i, (*arguments)[i], callee_graph)); |
| 798 } | 803 } |
| 799 | 804 |
| 800 // If the callee has optional parameters, rebuild the argument and stub | 805 // If the callee has optional parameters, rebuild the argument and stub |
| 801 // arrays so that actual arguments are in one-to-one with the formal | 806 // arrays so that actual arguments are in one-to-one with the formal |
| 802 // parameters. | 807 // parameters. |
| 803 if (function.HasOptionalParameters()) { | 808 if (function.HasOptionalParameters()) { |
| 804 TRACE_INLINING(THR_Print(" adjusting for optional parameters\n")); | 809 TRACE_INLINING(THR_Print(" adjusting for optional parameters\n")); |
| 805 if (!AdjustForOptionalParameters(*parsed_function, | 810 if (!AdjustForOptionalParameters(*parsed_function, argument_names, |
| 806 argument_names, | 811 arguments, param_stubs, |
| 807 arguments, | |
| 808 param_stubs, | |
| 809 callee_graph)) { | 812 callee_graph)) { |
| 810 function.set_is_inlinable(false); | 813 function.set_is_inlinable(false); |
| 811 TRACE_INLINING(THR_Print(" Bailout: optional arg mismatch\n")); | 814 TRACE_INLINING(THR_Print(" Bailout: optional arg mismatch\n")); |
| 812 PRINT_INLINING_TREE("Optional arg mismatch", | 815 PRINT_INLINING_TREE("Optional arg mismatch", &call_data->caller, |
| 813 &call_data->caller, &function, call_data->call); | 816 &function, call_data->call); |
| 814 return false; | 817 return false; |
| 815 } | 818 } |
| 816 } | 819 } |
| 817 | 820 |
| 818 // After treating optional parameters the actual/formal count must | 821 // After treating optional parameters the actual/formal count must |
| 819 // match. | 822 // match. |
| 820 ASSERT(arguments->length() == function.NumParameters()); | 823 ASSERT(arguments->length() == function.NumParameters()); |
| 821 ASSERT(param_stubs->length() == callee_graph->parameter_count()); | 824 ASSERT(param_stubs->length() == callee_graph->parameter_count()); |
| 822 | 825 |
| 823 // Update try-index of the callee graph. | 826 // Update try-index of the callee graph. |
| (...skipping 17 matching lines...) |
| 841 param_stubs); | 844 param_stubs); |
| 842 DEBUG_ASSERT(callee_graph->VerifyUseLists()); | 845 DEBUG_ASSERT(callee_graph->VerifyUseLists()); |
| 843 } | 846 } |
| 844 | 847 |
| 845 { | 848 { |
| 846 CSTAT_TIMER_SCOPE(thread(), graphinliner_opt_timer); | 849 CSTAT_TIMER_SCOPE(thread(), graphinliner_opt_timer); |
| 847 // TODO(fschneider): Improve suppression of speculative inlining. | 850 // TODO(fschneider): Improve suppression of speculative inlining. |
| 848 // Deopt-ids overlap between caller and callee. | 851 // Deopt-ids overlap between caller and callee. |
| 849 if (FLAG_precompiled_mode) { | 852 if (FLAG_precompiled_mode) { |
| 850 #ifdef DART_PRECOMPILER | 853 #ifdef DART_PRECOMPILER |
| 851 AotOptimizer optimizer(inliner_->precompiler_, | 854 AotOptimizer optimizer(inliner_->precompiler_, callee_graph, |
| 852 callee_graph, | |
| 853 inliner_->use_speculative_inlining_, | 855 inliner_->use_speculative_inlining_, |
| 854 inliner_->inlining_black_list_); | 856 inliner_->inlining_black_list_); |
| 855 optimizer.PopulateWithICData(); | 857 optimizer.PopulateWithICData(); |
| 856 | 858 |
| 857 optimizer.ApplyClassIds(); | 859 optimizer.ApplyClassIds(); |
| 858 DEBUG_ASSERT(callee_graph->VerifyUseLists()); | 860 DEBUG_ASSERT(callee_graph->VerifyUseLists()); |
| 859 | 861 |
| 860 FlowGraphTypePropagator::Propagate(callee_graph); | 862 FlowGraphTypePropagator::Propagate(callee_graph); |
| 861 DEBUG_ASSERT(callee_graph->VerifyUseLists()); | 863 DEBUG_ASSERT(callee_graph->VerifyUseLists()); |
| 862 | 864 |
| (...skipping 51 matching lines...) |
| 914 // Use heuristics do decide if this call should be inlined. | 916 // Use heuristics do decide if this call should be inlined. |
| 915 if (!ShouldWeInline(function, size, call_site_count, constants_count)) { | 917 if (!ShouldWeInline(function, size, call_site_count, constants_count)) { |
| 916 // If size is larger than all thresholds, don't consider it again. | 918 // If size is larger than all thresholds, don't consider it again. |
| 917 if ((size > FLAG_inlining_size_threshold) && | 919 if ((size > FLAG_inlining_size_threshold) && |
| 918 (call_site_count > FLAG_inlining_callee_call_sites_threshold) && | 920 (call_site_count > FLAG_inlining_callee_call_sites_threshold) && |
| 919 (size > FLAG_inlining_constant_arguments_min_size_threshold) && | 921 (size > FLAG_inlining_constant_arguments_min_size_threshold) && |
| 920 (size > FLAG_inlining_constant_arguments_max_size_threshold)) { | 922 (size > FLAG_inlining_constant_arguments_max_size_threshold)) { |
| 921 function.set_is_inlinable(false); | 923 function.set_is_inlinable(false); |
| 922 } | 924 } |
| 923 thread()->set_deopt_id(prev_deopt_id); | 925 thread()->set_deopt_id(prev_deopt_id); |
| 924 TRACE_INLINING(THR_Print(" Bailout: heuristics with " | 926 TRACE_INLINING( |
| 925 "code size: %" Pd ", " | 927 THR_Print(" Bailout: heuristics with " |
| 926 "call sites: %" Pd ", " | 928 "code size: %" Pd ", " |
| 927 "const args: %" Pd "\n", | 929 "call sites: %" Pd ", " |
| 928 size, | 930 "const args: %" Pd "\n", |
| 929 call_site_count, | 931 size, call_site_count, constants_count)); |
| 930 constants_count)); | 932 PRINT_INLINING_TREE("Heuristic fail", &call_data->caller, &function, |
| 931 PRINT_INLINING_TREE("Heuristic fail", | 933 call_data->call); |
| 932 &call_data->caller, &function, call_data->call); | |
| 933 return false; | 934 return false; |
| 934 } | 935 } |
| 935 | 936 |
| 936 // Inline dispatcher methods regardless of the current depth. | 937 // Inline dispatcher methods regardless of the current depth. |
| 937 const intptr_t depth = | 938 const intptr_t depth = (function.IsInvokeFieldDispatcher() || |
| 938 (function.IsInvokeFieldDispatcher() || | 939 function.IsNoSuchMethodDispatcher()) |
| 939 function.IsNoSuchMethodDispatcher()) ? 0 : inlining_depth_; | 940 ? 0 |
| 941 : inlining_depth_; |
| 940 collected_call_sites_->FindCallSites(callee_graph, depth, | 942 collected_call_sites_->FindCallSites(callee_graph, depth, |
| 941 &inlined_info_); | 943 &inlined_info_); |
| 942 | 944 |
| 943 // Add the function to the cache. | 945 // Add the function to the cache. |
| 944 if (!in_cache) { | 946 if (!in_cache) { |
| 945 function_cache_.Add(parsed_function); | 947 function_cache_.Add(parsed_function); |
| 946 } | 948 } |
| 947 | 949 |
| 948 // Build succeeded so we restore the bailout jump. | 950 // Build succeeded so we restore the bailout jump. |
| 949 inlined_ = true; | 951 inlined_ = true; |
| 950 inlined_size_ += size; | 952 inlined_size_ += size; |
| 951 if (is_recursive_call) { | 953 if (is_recursive_call) { |
| 952 inlined_recursive_call_ = true; | 954 inlined_recursive_call_ = true; |
| 953 } | 955 } |
| 954 thread()->set_deopt_id(prev_deopt_id); | 956 thread()->set_deopt_id(prev_deopt_id); |
| 955 | 957 |
| 956 call_data->callee_graph = callee_graph; | 958 call_data->callee_graph = callee_graph; |
| 957 call_data->parameter_stubs = param_stubs; | 959 call_data->parameter_stubs = param_stubs; |
| 958 call_data->exit_collector = exit_collector; | 960 call_data->exit_collector = exit_collector; |
| 959 | 961 |
| 960 // When inlined, we add the guarded fields of the callee to the caller's | 962 // When inlined, we add the guarded fields of the callee to the caller's |
| 961 // list of guarded fields. | 963 // list of guarded fields. |
| 962 const ZoneGrowableArray<const Field*>& callee_guarded_fields = | 964 const ZoneGrowableArray<const Field*>& callee_guarded_fields = |
| 963 *callee_graph->parsed_function().guarded_fields(); | 965 *callee_graph->parsed_function().guarded_fields(); |
| 964 for (intptr_t i = 0; i < callee_guarded_fields.length(); ++i) { | 966 for (intptr_t i = 0; i < callee_guarded_fields.length(); ++i) { |
| 965 caller_graph()-> | 967 caller_graph()->parsed_function().AddToGuardedFields( |
| 966 parsed_function().AddToGuardedFields(callee_guarded_fields[i]); | 968 callee_guarded_fields[i]); |
| 967 } | 969 } |
| 968 // When inlined, we add the deferred prefixes of the callee to the | 970 // When inlined, we add the deferred prefixes of the callee to the |
| 969 // caller's list of deferred prefixes. | 971 // caller's list of deferred prefixes. |
| 970 caller_graph()->AddToDeferredPrefixes( | 972 caller_graph()->AddToDeferredPrefixes( |
| 971 callee_graph->deferred_prefixes()); | 973 callee_graph->deferred_prefixes()); |
| 972 | 974 |
| 973 FlowGraphInliner::SetInliningId(callee_graph, | 975 FlowGraphInliner::SetInliningId( |
| 976 callee_graph, |
| 974 inliner_->NextInlineId(callee_graph->function(), | 977 inliner_->NextInlineId(callee_graph->function(), |
| 975 call_data->call->token_pos(), | 978 call_data->call->token_pos(), |
| 976 call_data->caller_inlining_id_)); | 979 call_data->caller_inlining_id_)); |
| 977 TRACE_INLINING(THR_Print(" Success\n")); | 980 TRACE_INLINING(THR_Print(" Success\n")); |
| 978 PRINT_INLINING_TREE(NULL, | 981 PRINT_INLINING_TREE(NULL, &call_data->caller, &function, call); |
| 979 &call_data->caller, &function, call); | |
| 980 return true; | 982 return true; |
| 981 } else { | 983 } else { |
| 982 error = thread()->sticky_error(); | 984 error = thread()->sticky_error(); |
| 983 thread()->clear_sticky_error(); | 985 thread()->clear_sticky_error(); |
| 984 | 986 |
| 985 if (error.IsLanguageError() && | 987 if (error.IsLanguageError() && |
| 986 (LanguageError::Cast(error).kind() == Report::kBailout)) { | 988 (LanguageError::Cast(error).kind() == Report::kBailout)) { |
| 987 if (error.raw() == Object::background_compilation_error().raw()) { | 989 if (error.raw() == Object::background_compilation_error().raw()) { |
| 988 // Fall through to exit the compilation, and retry it later. | 990 // Fall through to exit the compilation, and retry it later. |
| 989 } else { | 991 } else { |
| 990 thread()->set_deopt_id(prev_deopt_id); | 992 thread()->set_deopt_id(prev_deopt_id); |
| 991 TRACE_INLINING(THR_Print(" Bailout: %s\n", | 993 TRACE_INLINING( |
| 992 error.ToErrorCString())); | 994 THR_Print(" Bailout: %s\n", error.ToErrorCString())); |
| 993 PRINT_INLINING_TREE("Bailout", | 995 PRINT_INLINING_TREE("Bailout", &call_data->caller, &function, call); |
| 994 &call_data->caller, &function, call); | |
| 995 return false; | 996 return false; |
| 996 } | 997 } |
| 997 } else { | 998 } else { |
| 998 // Fall through to exit long jump scope. | 999 // Fall through to exit long jump scope. |
| 999 } | 1000 } |
| 1000 } | 1001 } |
| 1001 } | 1002 } |
| 1002 | 1003 |
| 1003 // Propagate a compile-time error. In precompilation we attempt to | 1004 // Propagate a compile-time error. In precompilation we attempt to |
| 1004 // inline functions that have never been compiled before; when JITing we | 1005 // inline functions that have never been compiled before; when JITing we |
| 1005 // should only see language errors in unoptimized compilation. | 1006 // should only see language errors in unoptimized compilation. |
| 1006 // Otherwise, there can be an out-of-memory error (unhandled exception). | 1007 // Otherwise, there can be an out-of-memory error (unhandled exception). |
| 1007 // In background compilation we may abort compilation as the state | 1008 // In background compilation we may abort compilation as the state |
| 1008 // changes while compiling. Propagate that 'error' and retry compilation | 1009 // changes while compiling. Propagate that 'error' and retry compilation |
| 1009 // later. | 1010 // later. |
| 1010 ASSERT(FLAG_precompiled_mode || | 1011 ASSERT(FLAG_precompiled_mode || Compiler::IsBackgroundCompilation() || |
| 1011 Compiler::IsBackgroundCompilation() || | |
| 1012 error.IsUnhandledException()); | 1012 error.IsUnhandledException()); |
| 1013 Thread::Current()->long_jump_base()->Jump(1, error); | 1013 Thread::Current()->long_jump_base()->Jump(1, error); |
| 1014 UNREACHABLE(); | 1014 UNREACHABLE(); |
| 1015 return false; | 1015 return false; |
| 1016 } | 1016 } |
| 1017 | 1017 |
| 1018 void PrintInlinedInfo(const Function& top) { | 1018 void PrintInlinedInfo(const Function& top) { |
| 1019 if (inlined_info_.length() > 0) { | 1019 if (inlined_info_.length() > 0) { |
| 1020 THR_Print("Inlining into: '%s' growth: %f (%" Pd " -> %" Pd ")\n", | 1020 THR_Print("Inlining into: '%s' growth: %f (%" Pd " -> %" Pd ")\n", |
| 1021 top.ToFullyQualifiedCString(), | 1021 top.ToFullyQualifiedCString(), GrowthFactor(), initial_size_, |
| 1022 GrowthFactor(), | 1022 inlined_size_); |
| 1023 initial_size_, | |
| 1024 inlined_size_); | |
| 1025 PrintInlinedInfoFor(top, 1); | 1023 PrintInlinedInfoFor(top, 1); |
| 1026 } | 1024 } |
| 1027 } | 1025 } |
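Note: GrowthFactor() is defined outside this hunk; judging from the trace line above, it presumably reports how much the inlined code grew relative to the caller's initial size. A hypothetical sketch (formula assumed, not taken from this CL):

    // Assumed definition -- the real one is not in this hunk.
    double GrowthFactor() const {
      return static_cast<double>(inlined_size_) /
             static_cast<double>(initial_size_);
    }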
| 1028 | 1026 |
| 1029 private: | 1027 private: |
| 1030 friend class PolymorphicInliner; | 1028 friend class PolymorphicInliner; |
| 1031 | 1029 |
| 1032 static bool Contains(const GrowableArray<intptr_t>& a, intptr_t deopt_id) { | 1030 static bool Contains(const GrowableArray<intptr_t>& a, intptr_t deopt_id) { |
| 1033 for (intptr_t i = 0; i < a.length(); i++) { | 1031 for (intptr_t i = 0; i < a.length(); i++) { |
| 1034 if (a[i] == deopt_id) return true; | 1032 if (a[i] == deopt_id) return true; |
| 1035 } | 1033 } |
| 1036 return false; | 1034 return false; |
| 1037 } | 1035 } |
| 1038 | 1036 |
| 1039 void PrintInlinedInfoFor(const Function& caller, intptr_t depth) { | 1037 void PrintInlinedInfoFor(const Function& caller, intptr_t depth) { |
| 1040 // Prevent duplicate printing as inlined_info aggregates all inlining. | 1038 // Prevent duplicate printing as inlined_info aggregates all inlining. |
| 1041 GrowableArray<intptr_t> call_instructions_printed; | 1039 GrowableArray<intptr_t> call_instructions_printed; |
| 1042 // Print those that were inlined. | 1040 // Print those that were inlined. |
| 1043 for (intptr_t i = 0; i < inlined_info_.length(); i++) { | 1041 for (intptr_t i = 0; i < inlined_info_.length(); i++) { |
| 1044 const InlinedInfo& info = inlined_info_[i]; | 1042 const InlinedInfo& info = inlined_info_[i]; |
| 1045 if (info.bailout_reason != NULL) { | 1043 if (info.bailout_reason != NULL) { |
| 1046 continue; | 1044 continue; |
| 1047 } | 1045 } |
| 1048 if ((info.inlined_depth == depth) && | 1046 if ((info.inlined_depth == depth) && |
| 1049 (info.caller->raw() == caller.raw()) && | 1047 (info.caller->raw() == caller.raw()) && |
| 1050 !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) { | 1048 !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) { |
| 1051 for (int t = 0; t < depth; t++) { | 1049 for (int t = 0; t < depth; t++) { |
| 1052 THR_Print(" "); | 1050 THR_Print(" "); |
| 1053 } | 1051 } |
| 1054 THR_Print("%" Pd " %s\n", | 1052 THR_Print("%" Pd " %s\n", info.call_instr->GetDeoptId(), |
| 1055 info.call_instr->GetDeoptId(), | 1053 info.inlined->ToQualifiedCString()); |
| 1056 info.inlined->ToQualifiedCString()); | |
| 1057 PrintInlinedInfoFor(*info.inlined, depth + 1); | 1054 PrintInlinedInfoFor(*info.inlined, depth + 1); |
| 1058 call_instructions_printed.Add(info.call_instr->GetDeoptId()); | 1055 call_instructions_printed.Add(info.call_instr->GetDeoptId()); |
| 1059 } | 1056 } |
| 1060 } | 1057 } |
| 1061 call_instructions_printed.Clear(); | 1058 call_instructions_printed.Clear(); |
| 1062 // Print those that were not inlined. | 1059 // Print those that were not inlined. |
| 1063 for (intptr_t i = 0; i < inlined_info_.length(); i++) { | 1060 for (intptr_t i = 0; i < inlined_info_.length(); i++) { |
| 1064 const InlinedInfo& info = inlined_info_[i]; | 1061 const InlinedInfo& info = inlined_info_[i]; |
| 1065 if (info.bailout_reason == NULL) { | 1062 if (info.bailout_reason == NULL) { |
| 1066 continue; | 1063 continue; |
| 1067 } | 1064 } |
| 1068 if ((info.inlined_depth == depth) && | 1065 if ((info.inlined_depth == depth) && |
| 1069 (info.caller->raw() == caller.raw()) && | 1066 (info.caller->raw() == caller.raw()) && |
| 1070 !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) { | 1067 !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) { |
| 1071 for (int t = 0; t < depth; t++) { | 1068 for (int t = 0; t < depth; t++) { |
| 1072 THR_Print(" "); | 1069 THR_Print(" "); |
| 1073 } | 1070 } |
| 1074 THR_Print("NO %" Pd " %s - %s\n", | 1071 THR_Print("NO %" Pd " %s - %s\n", info.call_instr->GetDeoptId(), |
| 1075 info.call_instr->GetDeoptId(), | 1072 info.inlined->ToQualifiedCString(), info.bailout_reason); |
| 1076 info.inlined->ToQualifiedCString(), | |
| 1077 info.bailout_reason); | |
| 1078 call_instructions_printed.Add(info.call_instr->GetDeoptId()); | 1073 call_instructions_printed.Add(info.call_instr->GetDeoptId()); |
| 1079 } | 1074 } |
| 1080 } | 1075 } |
| 1081 } | 1076 } |
| 1082 | 1077 |
| 1083 void InlineCall(InlinedCallData* call_data) { | 1078 void InlineCall(InlinedCallData* call_data) { |
| 1084 CSTAT_TIMER_SCOPE(Thread::Current(), graphinliner_subst_timer); | 1079 CSTAT_TIMER_SCOPE(Thread::Current(), graphinliner_subst_timer); |
| 1085 FlowGraph* callee_graph = call_data->callee_graph; | 1080 FlowGraph* callee_graph = call_data->callee_graph; |
| 1086 TargetEntryInstr* callee_entry = | 1081 TargetEntryInstr* callee_entry = |
| 1087 callee_graph->graph_entry()->normal_entry(); | 1082 callee_graph->graph_entry()->normal_entry(); |
| (...skipping 25 matching lines...) |
| 1113 callee_graph->graph_entry()->initial_definitions(); | 1108 callee_graph->graph_entry()->initial_definitions(); |
| 1114 for (intptr_t i = 0; i < defns->length(); ++i) { | 1109 for (intptr_t i = 0; i < defns->length(); ++i) { |
| 1115 ConstantInstr* constant = (*defns)[i]->AsConstant(); | 1110 ConstantInstr* constant = (*defns)[i]->AsConstant(); |
| 1116 if ((constant != NULL) && constant->HasUses()) { | 1111 if ((constant != NULL) && constant->HasUses()) { |
| 1117 constant->ReplaceUsesWith( | 1112 constant->ReplaceUsesWith( |
| 1118 caller_graph_->GetConstant(constant->value())); | 1113 caller_graph_->GetConstant(constant->value())); |
| 1119 } | 1114 } |
| 1120 CurrentContextInstr* context = (*defns)[i]->AsCurrentContext(); | 1115 CurrentContextInstr* context = (*defns)[i]->AsCurrentContext(); |
| 1121 if ((context != NULL) && context->HasUses()) { | 1116 if ((context != NULL) && context->HasUses()) { |
| 1122 ASSERT(call->IsClosureCall()); | 1117 ASSERT(call->IsClosureCall()); |
| 1123 LoadFieldInstr* context_load = new(Z) LoadFieldInstr( | 1118 LoadFieldInstr* context_load = new (Z) LoadFieldInstr( |
| 1124 new Value((*arguments)[0]->definition()), | 1119 new Value((*arguments)[0]->definition()), Closure::context_offset(), |
| 1125 Closure::context_offset(), | |
| 1126 AbstractType::ZoneHandle(zone(), AbstractType::null()), | 1120 AbstractType::ZoneHandle(zone(), AbstractType::null()), |
| 1127 call_data->call->token_pos()); | 1121 call_data->call->token_pos()); |
| 1128 context_load->set_is_immutable(true); | 1122 context_load->set_is_immutable(true); |
| 1129 context_load->set_ssa_temp_index( | 1123 context_load->set_ssa_temp_index(caller_graph_->alloc_ssa_temp_index()); |
| 1130 caller_graph_->alloc_ssa_temp_index()); | |
| 1131 context_load->InsertBefore(callee_entry->next()); | 1124 context_load->InsertBefore(callee_entry->next()); |
| 1132 context->ReplaceUsesWith(context_load); | 1125 context->ReplaceUsesWith(context_load); |
| 1133 } | 1126 } |
| 1134 } | 1127 } |
| 1135 | 1128 |
| 1136 // Check that inlining maintains use lists. | 1129 // Check that inlining maintains use lists. |
| 1137 DEBUG_ASSERT(!FLAG_verify_compiler || caller_graph_->VerifyUseLists()); | 1130 DEBUG_ASSERT(!FLAG_verify_compiler || caller_graph_->VerifyUseLists()); |
| 1138 } | 1131 } |
| 1139 | 1132 |
| 1140 static intptr_t CountConstants(const GrowableArray<Value*>& arguments) { | 1133 static intptr_t CountConstants(const GrowableArray<Value*>& arguments) { |
| 1141 intptr_t count = 0; | 1134 intptr_t count = 0; |
| 1142 for (intptr_t i = 0; i < arguments.length(); i++) { | 1135 for (intptr_t i = 0; i < arguments.length(); i++) { |
| 1143 if (arguments[i]->BindsToConstant()) count++; | 1136 if (arguments[i]->BindsToConstant()) count++; |
| 1144 } | 1137 } |
| 1145 return count; | 1138 return count; |
| 1146 } | 1139 } |
| 1147 | 1140 |
| 1148 // Parse a function reusing the cache if possible. | 1141 // Parse a function reusing the cache if possible. |
| 1149 ParsedFunction* GetParsedFunction(const Function& function, bool* in_cache) { | 1142 ParsedFunction* GetParsedFunction(const Function& function, bool* in_cache) { |
| 1150 // TODO(zerny): Use a hash map for the cache. | 1143 // TODO(zerny): Use a hash map for the cache. |
| 1151 for (intptr_t i = 0; i < function_cache_.length(); ++i) { | 1144 for (intptr_t i = 0; i < function_cache_.length(); ++i) { |
| 1152 ParsedFunction* parsed_function = function_cache_[i]; | 1145 ParsedFunction* parsed_function = function_cache_[i]; |
| 1153 if (parsed_function->function().raw() == function.raw()) { | 1146 if (parsed_function->function().raw() == function.raw()) { |
| 1154 *in_cache = true; | 1147 *in_cache = true; |
| 1155 return parsed_function; | 1148 return parsed_function; |
| 1156 } | 1149 } |
| 1157 } | 1150 } |
| 1158 *in_cache = false; | 1151 *in_cache = false; |
| 1159 ParsedFunction* parsed_function = | 1152 ParsedFunction* parsed_function = |
| 1160 new(Z) ParsedFunction(thread(), function); | 1153 new (Z) ParsedFunction(thread(), function); |
| 1161 if (!UseKernelFrontEndFor(parsed_function)) { | 1154 if (!UseKernelFrontEndFor(parsed_function)) { |
| 1162 Parser::ParseFunction(parsed_function); | 1155 Parser::ParseFunction(parsed_function); |
| 1163 parsed_function->AllocateVariables(); | 1156 parsed_function->AllocateVariables(); |
| 1164 } | 1157 } |
| 1165 return parsed_function; | 1158 return parsed_function; |
| 1166 } | 1159 } |
| 1167 | 1160 |
| 1168 void InlineStaticCalls() { | 1161 void InlineStaticCalls() { |
| 1169 const GrowableArray<CallSites::StaticCallInfo>& call_info = | 1162 const GrowableArray<CallSites::StaticCallInfo>& call_info = |
| 1170 inlining_call_sites_->static_calls(); | 1163 inlining_call_sites_->static_calls(); |
| 1171 TRACE_INLINING(THR_Print(" Static Calls (%" Pd ")\n", call_info.length())); | 1164 TRACE_INLINING(THR_Print(" Static Calls (%" Pd ")\n", call_info.length())); |
| 1172 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { | 1165 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { |
| 1173 StaticCallInstr* call = call_info[call_idx].call; | 1166 StaticCallInstr* call = call_info[call_idx].call; |
| 1174 const Function& target = call->function(); | 1167 const Function& target = call->function(); |
| 1175 if (!inliner_->AlwaysInline(target) && | 1168 if (!inliner_->AlwaysInline(target) && |
| 1176 (call_info[call_idx].ratio * 100) < FLAG_inlining_hotness) { | 1169 (call_info[call_idx].ratio * 100) < FLAG_inlining_hotness) { |
| 1177 TRACE_INLINING(THR_Print( | 1170 TRACE_INLINING( |
| 1178 " => %s (deopt count %d)\n Bailout: cold %f\n", | 1171 THR_Print(" => %s (deopt count %d)\n Bailout: cold %f\n", |
| 1179 target.ToCString(), | 1172 target.ToCString(), target.deoptimization_counter(), |
| 1180 target.deoptimization_counter(), | 1173 call_info[call_idx].ratio)); |
| 1181 call_info[call_idx].ratio)); | 1174 PRINT_INLINING_TREE("Too cold", &call_info[call_idx].caller(), |
| 1182 PRINT_INLINING_TREE("Too cold", | 1175 &call->function(), call); |
| 1183 &call_info[call_idx].caller(), &call->function(), call); | |
| 1184 continue; | 1176 continue; |
| 1185 } | 1177 } |
| 1186 GrowableArray<Value*> arguments(call->ArgumentCount()); | 1178 GrowableArray<Value*> arguments(call->ArgumentCount()); |
| 1187 for (int i = 0; i < call->ArgumentCount(); ++i) { | 1179 for (int i = 0; i < call->ArgumentCount(); ++i) { |
| 1188 arguments.Add(call->PushArgumentAt(i)->value()); | 1180 arguments.Add(call->PushArgumentAt(i)->value()); |
| 1189 } | 1181 } |
| 1190 InlinedCallData call_data( | 1182 InlinedCallData call_data( |
| 1191 call, &arguments, call_info[call_idx].caller(), | 1183 call, &arguments, call_info[call_idx].caller(), |
| 1192 call_info[call_idx].caller_graph->inlining_id()); | 1184 call_info[call_idx].caller_graph->inlining_id()); |
| 1193 if (TryInlining(call->function(), call->argument_names(), &call_data)) { | 1185 if (TryInlining(call->function(), call->argument_names(), &call_data)) { |
| 1194 InlineCall(&call_data); | 1186 InlineCall(&call_data); |
| 1195 } | 1187 } |
| 1196 } | 1188 } |
| 1197 } | 1189 } |
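The cold-call bailout above compares a call site's hotness ratio against FLAG_inlining_hotness. A minimal illustration with an assumed flag value (the default is not shown in this hunk):

    // Illustration only; 10 stands in for FLAG_inlining_hotness.
    const double ratio = 0.07;  // this site's share of the caller's call counts
    if (!inliner_->AlwaysInline(target) && (ratio * 100) < 10) {
      // 7 < 10: the site is reported as "Too cold" and skipped.
    }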
| 1198 | 1190 |
| 1199 void InlineClosureCalls() { | 1191 void InlineClosureCalls() { |
| 1200 const GrowableArray<CallSites::ClosureCallInfo>& call_info = | 1192 const GrowableArray<CallSites::ClosureCallInfo>& call_info = |
| 1201 inlining_call_sites_->closure_calls(); | 1193 inlining_call_sites_->closure_calls(); |
| 1202 TRACE_INLINING(THR_Print(" Closure Calls (%" Pd ")\n", | 1194 TRACE_INLINING( |
| 1203 call_info.length())); | 1195 THR_Print(" Closure Calls (%" Pd ")\n", call_info.length())); |
| 1204 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { | 1196 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { |
| 1205 ClosureCallInstr* call = call_info[call_idx].call; | 1197 ClosureCallInstr* call = call_info[call_idx].call; |
| 1206 // Find the closure of the callee. | 1198 // Find the closure of the callee. |
| 1207 ASSERT(call->ArgumentCount() > 0); | 1199 ASSERT(call->ArgumentCount() > 0); |
| 1208 Function& target = Function::ZoneHandle(); | 1200 Function& target = Function::ZoneHandle(); |
| 1209 AllocateObjectInstr* alloc = | 1201 AllocateObjectInstr* alloc = |
| 1210 call->ArgumentAt(0)->OriginalDefinition()->AsAllocateObject(); | 1202 call->ArgumentAt(0)->OriginalDefinition()->AsAllocateObject(); |
| 1211 if ((alloc != NULL) && !alloc->closure_function().IsNull()) { | 1203 if ((alloc != NULL) && !alloc->closure_function().IsNull()) { |
| 1212 target ^= alloc->closure_function().raw(); | 1204 target ^= alloc->closure_function().raw(); |
| 1213 ASSERT(alloc->cls().IsClosureClass()); | 1205 ASSERT(alloc->cls().IsClosureClass()); |
| 1214 } | 1206 } |
| 1215 ConstantInstr* constant = | 1207 ConstantInstr* constant = |
| 1216 call->ArgumentAt(0)->OriginalDefinition()->AsConstant(); | 1208 call->ArgumentAt(0)->OriginalDefinition()->AsConstant(); |
| 1217 if ((constant != NULL) && | 1209 if ((constant != NULL) && constant->value().IsClosure()) { |
| 1218 constant->value().IsClosure()) { | |
| 1219 target ^= Closure::Cast(constant->value()).function(); | 1210 target ^= Closure::Cast(constant->value()).function(); |
| 1220 } | 1211 } |
| 1221 | 1212 |
| 1222 if (target.IsNull()) { | 1213 if (target.IsNull()) { |
| 1223 TRACE_INLINING(THR_Print(" Bailout: non-closure operator\n")); | 1214 TRACE_INLINING(THR_Print(" Bailout: non-closure operator\n")); |
| 1224 continue; | 1215 continue; |
| 1225 } | 1216 } |
| 1226 GrowableArray<Value*> arguments(call->ArgumentCount()); | 1217 GrowableArray<Value*> arguments(call->ArgumentCount()); |
| 1227 for (int i = 0; i < call->ArgumentCount(); ++i) { | 1218 for (int i = 0; i < call->ArgumentCount(); ++i) { |
| 1228 arguments.Add(call->PushArgumentAt(i)->value()); | 1219 arguments.Add(call->PushArgumentAt(i)->value()); |
| 1229 } | 1220 } |
| 1230 InlinedCallData call_data( | 1221 InlinedCallData call_data( |
| 1231 call, &arguments, call_info[call_idx].caller(), | 1222 call, &arguments, call_info[call_idx].caller(), |
| 1232 call_info[call_idx].caller_graph->inlining_id()); | 1223 call_info[call_idx].caller_graph->inlining_id()); |
| 1233 if (TryInlining(target, | 1224 if (TryInlining(target, call->argument_names(), &call_data)) { |
| 1234 call->argument_names(), | |
| 1235 &call_data)) { | |
| 1236 InlineCall(&call_data); | 1225 InlineCall(&call_data); |
| 1237 } | 1226 } |
| 1238 } | 1227 } |
| 1239 } | 1228 } |
| 1240 | 1229 |
| 1241 void InlineInstanceCalls() { | 1230 void InlineInstanceCalls() { |
| 1242 const GrowableArray<CallSites::InstanceCallInfo>& call_info = | 1231 const GrowableArray<CallSites::InstanceCallInfo>& call_info = |
| 1243 inlining_call_sites_->instance_calls(); | 1232 inlining_call_sites_->instance_calls(); |
| 1244 TRACE_INLINING(THR_Print(" Polymorphic Instance Calls (%" Pd ")\n", | 1233 TRACE_INLINING(THR_Print(" Polymorphic Instance Calls (%" Pd ")\n", |
| 1245 call_info.length())); | 1234 call_info.length())); |
| 1246 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { | 1235 for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) { |
| 1247 PolymorphicInstanceCallInstr* call = call_info[call_idx].call; | 1236 PolymorphicInstanceCallInstr* call = call_info[call_idx].call; |
| 1248 if (call->with_checks()) { | 1237 if (call->with_checks()) { |
| 1249 // PolymorphicInliner introduces deoptimization paths. | 1238 // PolymorphicInliner introduces deoptimization paths. |
| 1250 if (!call->complete() && !FLAG_polymorphic_with_deopt) { | 1239 if (!call->complete() && !FLAG_polymorphic_with_deopt) { |
| 1251 TRACE_INLINING(THR_Print( | 1240 TRACE_INLINING( |
| 1252 " => %s\n Bailout: call with checks\n", | 1241 THR_Print(" => %s\n Bailout: call with checks\n", |
| 1253 call->instance_call()->function_name().ToCString())); | 1242 call->instance_call()->function_name().ToCString())); |
| 1254 continue; | 1243 continue; |
| 1255 } | 1244 } |
| 1256 const Function& cl = call_info[call_idx].caller(); | 1245 const Function& cl = call_info[call_idx].caller(); |
| 1257 intptr_t caller_inlining_id = | 1246 intptr_t caller_inlining_id = |
| 1258 call_info[call_idx].caller_graph->inlining_id(); | 1247 call_info[call_idx].caller_graph->inlining_id(); |
| 1259 PolymorphicInliner inliner(this, call, cl, caller_inlining_id); | 1248 PolymorphicInliner inliner(this, call, cl, caller_inlining_id); |
| 1260 inliner.Inline(); | 1249 inliner.Inline(); |
| 1261 continue; | 1250 continue; |
| 1262 } | 1251 } |
| 1263 | 1252 |
| 1264 const ICData& ic_data = call->ic_data(); | 1253 const ICData& ic_data = call->ic_data(); |
| 1265 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); | 1254 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); |
| 1266 if (!inliner_->AlwaysInline(target) && | 1255 if (!inliner_->AlwaysInline(target) && |
| 1267 (call_info[call_idx].ratio * 100) < FLAG_inlining_hotness) { | 1256 (call_info[call_idx].ratio * 100) < FLAG_inlining_hotness) { |
| 1268 TRACE_INLINING(THR_Print( | 1257 TRACE_INLINING( |
| 1269 " => %s (deopt count %d)\n Bailout: cold %f\n", | 1258 THR_Print(" => %s (deopt count %d)\n Bailout: cold %f\n", |
| 1270 target.ToCString(), | 1259 target.ToCString(), target.deoptimization_counter(), |
| 1271 target.deoptimization_counter(), | 1260 call_info[call_idx].ratio)); |
| 1272 call_info[call_idx].ratio)); | 1261 PRINT_INLINING_TREE("Too cold", &call_info[call_idx].caller(), &target, |
| 1273 PRINT_INLINING_TREE("Too cold", | 1262 call); |
| 1274 &call_info[call_idx].caller(), &target, call); | |
| 1275 continue; | 1263 continue; |
| 1276 } | 1264 } |
| 1277 GrowableArray<Value*> arguments(call->ArgumentCount()); | 1265 GrowableArray<Value*> arguments(call->ArgumentCount()); |
| 1278 for (int arg_i = 0; arg_i < call->ArgumentCount(); ++arg_i) { | 1266 for (int arg_i = 0; arg_i < call->ArgumentCount(); ++arg_i) { |
| 1279 arguments.Add(call->PushArgumentAt(arg_i)->value()); | 1267 arguments.Add(call->PushArgumentAt(arg_i)->value()); |
| 1280 } | 1268 } |
| 1281 InlinedCallData call_data( | 1269 InlinedCallData call_data( |
| 1282 call, &arguments, call_info[call_idx].caller(), | 1270 call, &arguments, call_info[call_idx].caller(), |
| 1283 call_info[call_idx].caller_graph->inlining_id()); | 1271 call_info[call_idx].caller_graph->inlining_id()); |
| 1284 if (TryInlining(target, | 1272 if (TryInlining(target, call->instance_call()->argument_names(), |
| 1285 call->instance_call()->argument_names(), | |
| 1286 &call_data)) { | 1273 &call_data)) { |
| 1287 InlineCall(&call_data); | 1274 InlineCall(&call_data); |
| 1288 } | 1275 } |
| 1289 } | 1276 } |
| 1290 } | 1277 } |
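In summary, the loop above routes each instance-call site one of three ways; a condensed sketch (comments only, mirroring the code above):

    // if (call->with_checks()) {
    //   if (!call->complete() && !FLAG_polymorphic_with_deopt) skip the site;
    //   else hand it to PolymorphicInliner, which may add deopt paths.
    // } else {
    //   treat it as monomorphic: inline ic_data.GetTargetAt(0), subject to
    //   the same AlwaysInline / hotness test used for static calls.
    // }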
| 1291 | 1278 |
| 1292 bool AdjustForOptionalParameters(const ParsedFunction& parsed_function, | 1279 bool AdjustForOptionalParameters(const ParsedFunction& parsed_function, |
| 1293 const Array& argument_names, | 1280 const Array& argument_names, |
| 1294 GrowableArray<Value*>* arguments, | 1281 GrowableArray<Value*>* arguments, |
| 1295 ZoneGrowableArray<Definition*>* param_stubs, | 1282 ZoneGrowableArray<Definition*>* param_stubs, |
| (...skipping 15 matching lines...) |
| 1311 for (intptr_t i = fixed_param_count; i < arg_count; ++i) { | 1298 for (intptr_t i = fixed_param_count; i < arg_count; ++i) { |
| 1312 param_stubs->Add(CreateParameterStub(i, (*arguments)[i], callee_graph)); | 1299 param_stubs->Add(CreateParameterStub(i, (*arguments)[i], callee_graph)); |
| 1313 } | 1300 } |
| 1314 ASSERT(function.NumOptionalPositionalParameters() == | 1301 ASSERT(function.NumOptionalPositionalParameters() == |
| 1315 (param_count - fixed_param_count)); | 1302 (param_count - fixed_param_count)); |
| 1316 // For each optional positional parameter without an actual, add its | 1303 // For each optional positional parameter without an actual, add its |
| 1317 // default value. | 1304 // default value. |
| 1318 for (intptr_t i = arg_count; i < param_count; ++i) { | 1305 for (intptr_t i = arg_count; i < param_count; ++i) { |
| 1319 const Instance& object = | 1306 const Instance& object = |
| 1320 parsed_function.DefaultParameterValueAt(i - fixed_param_count); | 1307 parsed_function.DefaultParameterValueAt(i - fixed_param_count); |
| 1321 ConstantInstr* constant = new(Z) ConstantInstr(object); | 1308 ConstantInstr* constant = new (Z) ConstantInstr(object); |
| 1322 arguments->Add(NULL); | 1309 arguments->Add(NULL); |
| 1323 param_stubs->Add(constant); | 1310 param_stubs->Add(constant); |
| 1324 } | 1311 } |
| 1325 return true; | 1312 return true; |
| 1326 } | 1313 } |
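    // Worked example (hypothetical signature, not from this CL): for
    // f(a, [b = 1, c = 2]) called with two positional actuals,
    // fixed_param_count == 1, arg_count == 2 and param_count == 3. The loop
    // above adds a parameter stub for b from the second actual; the loop over
    // [arg_count, param_count) then appends ConstantInstr(2) as c's default,
    // with a NULL placeholder in 'arguments'.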
| 1327 | 1314 |
| 1328 ASSERT(function.HasOptionalNamedParameters()); | 1315 ASSERT(function.HasOptionalNamedParameters()); |
| 1329 | 1316 |
| 1330 // Passed arguments must match fixed parameters plus named arguments. | 1317 // Passed arguments must match fixed parameters plus named arguments. |
| 1331 intptr_t argument_names_count = | 1318 intptr_t argument_names_count = |
| (...skipping 68 matching lines...) |
| 1400 PolymorphicInstanceCallInstr* call, | 1387 PolymorphicInstanceCallInstr* call, |
| 1401 const Function& caller_function, | 1388 const Function& caller_function, |
| 1402 intptr_t caller_inlining_id) | 1389 intptr_t caller_inlining_id) |
| 1403 : owner_(owner), | 1390 : owner_(owner), |
| 1404 call_(call), | 1391 call_(call), |
| 1405 num_variants_(call->ic_data().NumberOfChecks()), | 1392 num_variants_(call->ic_data().NumberOfChecks()), |
| 1406 variants_(num_variants_), | 1393 variants_(num_variants_), |
| 1407 inlined_variants_(num_variants_), | 1394 inlined_variants_(num_variants_), |
| 1408 non_inlined_variants_(num_variants_), | 1395 non_inlined_variants_(num_variants_), |
| 1409 inlined_entries_(num_variants_), | 1396 inlined_entries_(num_variants_), |
| 1410 exit_collector_(new(Z) | 1397 exit_collector_(new (Z) InlineExitCollector(owner->caller_graph(), call)), |
| 1411 InlineExitCollector(owner->caller_graph(), call)), | |
| 1412 caller_function_(caller_function), | 1398 caller_function_(caller_function), |
| 1413 caller_inlining_id_(caller_inlining_id) { | 1399 caller_inlining_id_(caller_inlining_id) {} |
| 1414 } | |
| 1415 | 1400 |
| 1416 | 1401 |
| 1417 Isolate* PolymorphicInliner::isolate() const { | 1402 Isolate* PolymorphicInliner::isolate() const { |
| 1418 return owner_->caller_graph()->isolate(); | 1403 return owner_->caller_graph()->isolate(); |
| 1419 } | 1404 } |
| 1420 | 1405 |
| 1421 | 1406 |
| 1422 Zone* PolymorphicInliner::zone() const { | 1407 Zone* PolymorphicInliner::zone() const { |
| 1423 return owner_->caller_graph()->zone(); | 1408 return owner_->caller_graph()->zone(); |
| 1424 } | 1409 } |
| (...skipping 31 matching lines...) |
| 1456 old_target->ReplaceAsPredecessorWith(new_join); | 1441 old_target->ReplaceAsPredecessorWith(new_join); |
| 1457 for (intptr_t j = 0; j < old_target->dominated_blocks().length(); ++j) { | 1442 for (intptr_t j = 0; j < old_target->dominated_blocks().length(); ++j) { |
| 1458 BlockEntryInstr* block = old_target->dominated_blocks()[j]; | 1443 BlockEntryInstr* block = old_target->dominated_blocks()[j]; |
| 1459 new_join->AddDominatedBlock(block); | 1444 new_join->AddDominatedBlock(block); |
| 1460 } | 1445 } |
| 1461 // Create a new target with the join as unconditional successor. | 1446 // Create a new target with the join as unconditional successor. |
| 1462 TargetEntryInstr* new_target = | 1447 TargetEntryInstr* new_target = |
| 1463 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), | 1448 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), |
| 1464 old_target->try_index()); | 1449 old_target->try_index()); |
| 1465 new_target->InheritDeoptTarget(zone(), new_join); | 1450 new_target->InheritDeoptTarget(zone(), new_join); |
| 1466 GotoInstr* new_goto = new(Z) GotoInstr(new_join); | 1451 GotoInstr* new_goto = new (Z) GotoInstr(new_join); |
| 1467 new_goto->InheritDeoptTarget(zone(), new_join); | 1452 new_goto->InheritDeoptTarget(zone(), new_join); |
| 1468 new_target->LinkTo(new_goto); | 1453 new_target->LinkTo(new_goto); |
| 1469 new_target->set_last_instruction(new_goto); | 1454 new_target->set_last_instruction(new_goto); |
| 1470 new_join->predecessors_.Add(new_target); | 1455 new_join->predecessors_.Add(new_target); |
| 1471 | 1456 |
| 1472 // Record the new target for the first variant. | 1457 // Record the new target for the first variant. |
| 1473 inlined_entries_[i] = new_target; | 1458 inlined_entries_[i] = new_target; |
| 1474 } | 1459 } |
| 1475 ASSERT(inlined_entries_[i]->IsTargetEntry()); | 1460 ASSERT(inlined_entries_[i]->IsTargetEntry()); |
| 1476 // Record the shared join for this variant. | 1461 // Record the shared join for this variant. |
| (...skipping 14 matching lines...) |
| 1491 if (target.raw() == non_inlined_variants_[i].target->raw()) { | 1476 if (target.raw() == non_inlined_variants_[i].target->raw()) { |
| 1492 return true; | 1477 return true; |
| 1493 } | 1478 } |
| 1494 } | 1479 } |
| 1495 | 1480 |
| 1496 return false; | 1481 return false; |
| 1497 } | 1482 } |
| 1498 | 1483 |
| 1499 | 1484 |
| 1500 bool PolymorphicInliner::TryInliningPoly(intptr_t receiver_cid, | 1485 bool PolymorphicInliner::TryInliningPoly(intptr_t receiver_cid, |
| 1501 const Function& target) { | 1486 const Function& target) { |
| 1502 if ((!FLAG_precompiled_mode || | 1487 if ((!FLAG_precompiled_mode || |
| 1503 owner_->inliner_->use_speculative_inlining()) && | 1488 owner_->inliner_->use_speculative_inlining()) && |
| 1504 TryInlineRecognizedMethod(receiver_cid, target)) { | 1489 TryInlineRecognizedMethod(receiver_cid, target)) { |
| 1505 owner_->inlined_ = true; | 1490 owner_->inlined_ = true; |
| 1506 return true; | 1491 return true; |
| 1507 } | 1492 } |
| 1508 | 1493 |
| 1509 GrowableArray<Value*> arguments(call_->ArgumentCount()); | 1494 GrowableArray<Value*> arguments(call_->ArgumentCount()); |
| 1510 for (int i = 0; i < call_->ArgumentCount(); ++i) { | 1495 for (int i = 0; i < call_->ArgumentCount(); ++i) { |
| 1511 arguments.Add(call_->PushArgumentAt(i)->value()); | 1496 arguments.Add(call_->PushArgumentAt(i)->value()); |
| 1512 } | 1497 } |
| 1513 InlinedCallData call_data(call_, &arguments, | 1498 InlinedCallData call_data(call_, &arguments, caller_function_, |
| 1514 caller_function_, | |
| 1515 caller_inlining_id_); | 1499 caller_inlining_id_); |
| 1516 if (!owner_->TryInlining(target, | 1500 if (!owner_->TryInlining(target, call_->instance_call()->argument_names(), |
| 1517 call_->instance_call()->argument_names(), | |
| 1518 &call_data)) { | 1501 &call_data)) { |
| 1519 return false; | 1502 return false; |
| 1520 } | 1503 } |
| 1521 | 1504 |
| 1522 FlowGraph* callee_graph = call_data.callee_graph; | 1505 FlowGraph* callee_graph = call_data.callee_graph; |
| 1523 call_data.exit_collector->PrepareGraphs(callee_graph); | 1506 call_data.exit_collector->PrepareGraphs(callee_graph); |
| 1524 inlined_entries_.Add(callee_graph->graph_entry()); | 1507 inlined_entries_.Add(callee_graph->graph_entry()); |
| 1525 exit_collector_->Union(call_data.exit_collector); | 1508 exit_collector_->Union(call_data.exit_collector); |
| 1526 | 1509 |
| 1527 // Replace parameter stubs and constants. Replace the receiver argument | 1510 // Replace parameter stubs and constants. Replace the receiver argument |
| 1528 // with a redefinition to prevent code from the inlined body from being | 1511 // with a redefinition to prevent code from the inlined body from being |
| 1529 // hoisted above the inlined entry. | 1512 // hoisted above the inlined entry. |
| 1530 ASSERT(arguments.length() > 0); | 1513 ASSERT(arguments.length() > 0); |
| 1531 Value* actual = arguments[0]; | 1514 Value* actual = arguments[0]; |
| 1532 RedefinitionInstr* redefinition = new(Z) | 1515 RedefinitionInstr* redefinition = new (Z) RedefinitionInstr(actual->Copy(Z)); |
| 1533 RedefinitionInstr(actual->Copy(Z)); | |
| 1534 redefinition->set_ssa_temp_index( | 1516 redefinition->set_ssa_temp_index( |
| 1535 owner_->caller_graph()->alloc_ssa_temp_index()); | 1517 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1536 redefinition->UpdateType(CompileType::FromCid(receiver_cid)); | 1518 redefinition->UpdateType(CompileType::FromCid(receiver_cid)); |
| 1537 redefinition->InsertAfter(callee_graph->graph_entry()->normal_entry()); | 1519 redefinition->InsertAfter(callee_graph->graph_entry()->normal_entry()); |
| 1538 Definition* stub = (*call_data.parameter_stubs)[0]; | 1520 Definition* stub = (*call_data.parameter_stubs)[0]; |
| 1539 stub->ReplaceUsesWith(redefinition); | 1521 stub->ReplaceUsesWith(redefinition); |
| 1540 | 1522 |
| 1541 for (intptr_t i = 1; i < arguments.length(); ++i) { | 1523 for (intptr_t i = 1; i < arguments.length(); ++i) { |
| 1542 actual = arguments[i]; | 1524 actual = arguments[i]; |
| 1543 if (actual != NULL) { | 1525 if (actual != NULL) { |
| 1544 stub = (*call_data.parameter_stubs)[i]; | 1526 stub = (*call_data.parameter_stubs)[i]; |
| 1545 stub->ReplaceUsesWith(actual->definition()); | 1527 stub->ReplaceUsesWith(actual->definition()); |
| 1546 } | 1528 } |
| 1547 } | 1529 } |
| 1548 GrowableArray<Definition*>* defns = | 1530 GrowableArray<Definition*>* defns = |
| 1549 callee_graph->graph_entry()->initial_definitions(); | 1531 callee_graph->graph_entry()->initial_definitions(); |
| 1550 for (intptr_t i = 0; i < defns->length(); ++i) { | 1532 for (intptr_t i = 0; i < defns->length(); ++i) { |
| 1551 ConstantInstr* constant = (*defns)[i]->AsConstant(); | 1533 ConstantInstr* constant = (*defns)[i]->AsConstant(); |
| 1552 if ((constant != NULL) && constant->HasUses()) { | 1534 if ((constant != NULL) && constant->HasUses()) { |
| 1553 constant->ReplaceUsesWith( | 1535 constant->ReplaceUsesWith( |
| 1554 owner_->caller_graph()->GetConstant(constant->value())); | 1536 owner_->caller_graph()->GetConstant(constant->value())); |
| 1555 } | 1537 } |
| 1556 CurrentContextInstr* context = (*defns)[i]->AsCurrentContext(); | 1538 CurrentContextInstr* context = (*defns)[i]->AsCurrentContext(); |
| 1557 if ((context != NULL) && context->HasUses()) { | 1539 if ((context != NULL) && context->HasUses()) { |
| 1558 ASSERT(call_data.call->IsClosureCall()); | 1540 ASSERT(call_data.call->IsClosureCall()); |
| 1559 LoadFieldInstr* context_load = new(Z) LoadFieldInstr( | 1541 LoadFieldInstr* context_load = new (Z) |
| 1560 new Value(redefinition), | 1542 LoadFieldInstr(new Value(redefinition), Closure::context_offset(), |
| 1561 Closure::context_offset(), | 1543 AbstractType::ZoneHandle(zone(), AbstractType::null()), |
| 1562 AbstractType::ZoneHandle(zone(), AbstractType::null()), | 1544 call_data.call->token_pos()); |
| 1563 call_data.call->token_pos()); | |
| 1564 context_load->set_is_immutable(true); | 1545 context_load->set_is_immutable(true); |
| 1565 context_load->set_ssa_temp_index( | 1546 context_load->set_ssa_temp_index( |
| 1566 owner_->caller_graph()->alloc_ssa_temp_index()); | 1547 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1567 context_load->InsertAfter(redefinition); | 1548 context_load->InsertAfter(redefinition); |
| 1568 context->ReplaceUsesWith(context_load); | 1549 context->ReplaceUsesWith(context_load); |
| 1569 } | 1550 } |
| 1570 } | 1551 } |
| 1571 return true; | 1552 return true; |
| 1572 } | 1553 } |
| 1573 | 1554 |
| 1574 | 1555 |
| 1575 static Instruction* AppendInstruction(Instruction* first, | 1556 static Instruction* AppendInstruction(Instruction* first, Instruction* second) { |
| 1576 Instruction* second) { | |
| 1577 for (intptr_t i = second->InputCount() - 1; i >= 0; --i) { | 1557 for (intptr_t i = second->InputCount() - 1; i >= 0; --i) { |
| 1578 Value* input = second->InputAt(i); | 1558 Value* input = second->InputAt(i); |
| 1579 input->definition()->AddInputUse(input); | 1559 input->definition()->AddInputUse(input); |
| 1580 } | 1560 } |
| 1581 first->LinkTo(second); | 1561 first->LinkTo(second); |
| 1582 return second; | 1562 return second; |
| 1583 } | 1563 } |
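AppendInstruction both links 'second' after 'first' and registers each of second's input uses; this lets the decision-graph code below thread a cursor through freshly created instructions, for example (both lines appear further down in this file):

    cursor = AppendInstruction(cursor, load_cid);
    AppendInstruction(AppendInstruction(cursor, cid_constant), branch);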
| 1584 | 1564 |
| 1585 | 1565 |
| 1586 bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid, | 1566 bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid, |
| 1587 const Function& target) { | 1567 const Function& target) { |
| 1588 TargetEntryInstr* entry; | 1568 TargetEntryInstr* entry; |
| 1589 Definition* last; | 1569 Definition* last; |
| 1590 // Replace the receiver argument with a redefinition to prevent code from | 1570 // Replace the receiver argument with a redefinition to prevent code from |
| 1591 // the inlined body from being hoisted above the inlined entry. | 1571 // the inlined body from being hoisted above the inlined entry. |
| 1592 GrowableArray<Definition*> arguments(call_->ArgumentCount()); | 1572 GrowableArray<Definition*> arguments(call_->ArgumentCount()); |
| 1593 Definition* receiver = call_->ArgumentAt(0); | 1573 Definition* receiver = call_->ArgumentAt(0); |
| 1594 RedefinitionInstr* redefinition = | 1574 RedefinitionInstr* redefinition = |
| 1595 new(Z) RedefinitionInstr(new(Z) Value(receiver)); | 1575 new (Z) RedefinitionInstr(new (Z) Value(receiver)); |
| 1596 redefinition->set_ssa_temp_index( | 1576 redefinition->set_ssa_temp_index( |
| 1597 owner_->caller_graph()->alloc_ssa_temp_index()); | 1577 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1598 if (FlowGraphInliner::TryInlineRecognizedMethod( | 1578 if (FlowGraphInliner::TryInlineRecognizedMethod( |
| 1599 owner_->caller_graph(), | 1579 owner_->caller_graph(), receiver_cid, target, call_, redefinition, |
| 1600 receiver_cid, | |
| 1601 target, | |
| 1602 call_, | |
| 1603 redefinition, | |
| 1604 call_->instance_call()->token_pos(), | 1580 call_->instance_call()->token_pos(), |
| 1605 *call_->instance_call()->ic_data(), | 1581 *call_->instance_call()->ic_data(), &entry, &last)) { |
| 1606 &entry, &last)) { | |
| 1607 // Create a graph fragment. | 1582 // Create a graph fragment. |
| 1608 redefinition->InsertAfter(entry); | 1583 redefinition->InsertAfter(entry); |
| 1609 InlineExitCollector* exit_collector = | 1584 InlineExitCollector* exit_collector = |
| 1610 new(Z) InlineExitCollector(owner_->caller_graph(), call_); | 1585 new (Z) InlineExitCollector(owner_->caller_graph(), call_); |
| 1611 | 1586 |
| 1612 ReturnInstr* result = | 1587 ReturnInstr* result = new (Z) |
| 1613 new(Z) ReturnInstr(call_->instance_call()->token_pos(), | 1588 ReturnInstr(call_->instance_call()->token_pos(), new (Z) Value(last)); |
| 1614 new(Z) Value(last)); | |
| 1615 owner_->caller_graph()->AppendTo( | 1589 owner_->caller_graph()->AppendTo( |
| 1616 last, | 1590 last, result, |
| 1617 result, | |
| 1618 call_->env(), // Return can become deoptimization target. | 1591 call_->env(), // Return can become deoptimization target. |
| 1619 FlowGraph::kEffect); | 1592 FlowGraph::kEffect); |
| 1620 entry->set_last_instruction(result); | 1593 entry->set_last_instruction(result); |
| 1621 exit_collector->AddExit(result); | 1594 exit_collector->AddExit(result); |
| 1622 ParsedFunction* temp_parsed_function = | 1595 ParsedFunction* temp_parsed_function = |
| 1623 new ParsedFunction(Thread::Current(), target); | 1596 new ParsedFunction(Thread::Current(), target); |
| 1624 GraphEntryInstr* graph_entry = | 1597 GraphEntryInstr* graph_entry = new (Z) |
| 1625 new(Z) GraphEntryInstr(*temp_parsed_function, | 1598 GraphEntryInstr(*temp_parsed_function, entry, Compiler::kNoOSRDeoptId); |
| 1626 entry, | |
| 1627 Compiler::kNoOSRDeoptId); | |
| 1628 // Update polymorphic inliner state. | 1599 // Update polymorphic inliner state. |
| 1629 inlined_entries_.Add(graph_entry); | 1600 inlined_entries_.Add(graph_entry); |
| 1630 exit_collector_->Union(exit_collector); | 1601 exit_collector_->Union(exit_collector); |
| 1631 return true; | 1602 return true; |
| 1632 } | 1603 } |
| 1633 return false; | 1604 return false; |
| 1634 } | 1605 } |
| 1635 | 1606 |
| 1636 | 1607 |
| 1637 // Build a DAG to dispatch to the inlined function bodies. Load the class | 1608 // Build a DAG to dispatch to the inlined function bodies. Load the class |
| 1638 // id of the receiver and make explicit comparisons for each inlined body, | 1609 // id of the receiver and make explicit comparisons for each inlined body, |
| 1639 // in frequency order. If all variants are inlined, the entry to the last | 1610 // in frequency order. If all variants are inlined, the entry to the last |
| 1640 // inlined body is guarded by a CheckClassId instruction which can deopt. | 1611 // inlined body is guarded by a CheckClassId instruction which can deopt. |
| 1641 // If not all variants are inlined, we add a PolymorphicInstanceCall | 1612 // If not all variants are inlined, we add a PolymorphicInstanceCall |
| 1642 // instruction to handle the non-inlined variants. | 1613 // instruction to handle the non-inlined variants. |
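A condensed sketch of the dispatch this builds, assuming two inlined receiver classes (cids A and B) and at least one non-inlined variant (shape only, not code from the CL):

    //   cid = LoadClassId(receiver)
    //   if (cid == A) goto inlined_body_A;   // StrictCompare + Branch
    //   if (cid == B) goto inlined_body_B;
    //   PolymorphicInstanceCall(...)         // fallback for remaining cids
    //
    // If every variant is inlined, the last body is instead guarded by a
    // deoptimizing CheckClassId, omitted when the call site is 'complete'.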
| 1643 TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() { | 1614 TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() { |
| 1644 // Start with a fresh target entry. | 1615 // Start with a fresh target entry. |
| 1645 TargetEntryInstr* entry = | 1616 TargetEntryInstr* entry = |
| 1646 new(Z) TargetEntryInstr( | 1617 new (Z) TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), |
| 1647 owner_->caller_graph()->allocate_block_id(), | 1618 call_->GetBlock()->try_index()); |
| 1648 call_->GetBlock()->try_index()); | |
| 1649 entry->InheritDeoptTarget(zone(), call_); | 1619 entry->InheritDeoptTarget(zone(), call_); |
| 1650 | 1620 |
| 1651 // This function uses a cursor (a pointer to the 'current' instruction) to | 1621 // This function uses a cursor (a pointer to the 'current' instruction) to |
| 1652 // build the graph. The next instruction will be inserted after the | 1622 // build the graph. The next instruction will be inserted after the |
| 1653 // cursor. | 1623 // cursor. |
| 1654 TargetEntryInstr* current_block = entry; | 1624 TargetEntryInstr* current_block = entry; |
| 1655 Instruction* cursor = entry; | 1625 Instruction* cursor = entry; |
| 1656 | 1626 |
| 1657 Definition* receiver = call_->ArgumentAt(0); | 1627 Definition* receiver = call_->ArgumentAt(0); |
| 1658 // There are at least two variants including non-inlined ones, so we have | 1628 // There are at least two variants including non-inlined ones, so we have |
| 1659 // at least one branch on the class id. | 1629 // at least one branch on the class id. |
| 1660 LoadClassIdInstr* load_cid = | 1630 LoadClassIdInstr* load_cid = |
| 1661 new(Z) LoadClassIdInstr(new(Z) Value(receiver)); | 1631 new (Z) LoadClassIdInstr(new (Z) Value(receiver)); |
| 1662 load_cid->set_ssa_temp_index(owner_->caller_graph()->alloc_ssa_temp_index()); | 1632 load_cid->set_ssa_temp_index(owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1663 cursor = AppendInstruction(cursor, load_cid); | 1633 cursor = AppendInstruction(cursor, load_cid); |
| 1664 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) { | 1634 for (intptr_t i = 0; i < inlined_variants_.length(); ++i) { |
| 1665 // 1. Guard the body with a class id check. | 1635 // 1. Guard the body with a class id check. |
| 1666 if ((i == (inlined_variants_.length() - 1)) && | 1636 if ((i == (inlined_variants_.length() - 1)) && |
| 1667 non_inlined_variants_.is_empty()) { | 1637 non_inlined_variants_.is_empty()) { |
| 1668 // If it is the last variant use a check class id instruction which can | 1638 // If it is the last variant use a check class id instruction which can |
| 1669 // deoptimize, followed unconditionally by the body. Omit the check if | 1639 // deoptimize, followed unconditionally by the body. Omit the check if |
| 1670 // we know that we have covered all possible classes. | 1640 // we know that we have covered all possible classes. |
| 1671 if (!call_->complete()) { | 1641 if (!call_->complete()) { |
| 1672 RedefinitionInstr* cid_redefinition = | 1642 RedefinitionInstr* cid_redefinition = |
| 1673 new RedefinitionInstr(new(Z) Value(load_cid)); | 1643 new RedefinitionInstr(new (Z) Value(load_cid)); |
| 1674 cid_redefinition->set_ssa_temp_index( | 1644 cid_redefinition->set_ssa_temp_index( |
| 1675 owner_->caller_graph()->alloc_ssa_temp_index()); | 1645 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1676 cursor = AppendInstruction(cursor, cid_redefinition); | 1646 cursor = AppendInstruction(cursor, cid_redefinition); |
| 1677 CheckClassIdInstr* check_class_id = new(Z) CheckClassIdInstr( | 1647 CheckClassIdInstr* check_class_id = new (Z) |
| 1678 new(Z) Value(cid_redefinition), | 1648 CheckClassIdInstr(new (Z) Value(cid_redefinition), |
| 1679 inlined_variants_[i].cid, | 1649 inlined_variants_[i].cid, call_->deopt_id()); |
| 1680 call_->deopt_id()); | |
| 1681 check_class_id->InheritDeoptTarget(zone(), call_); | 1650 check_class_id->InheritDeoptTarget(zone(), call_); |
| 1682 cursor = AppendInstruction(cursor, check_class_id); | 1651 cursor = AppendInstruction(cursor, check_class_id); |
| 1683 } | 1652 } |
| 1684 | 1653 |
| 1685 // The next instruction is the first instruction of the inlined body. | 1654 // The next instruction is the first instruction of the inlined body. |
| 1686 // Handle the two possible cases (unshared and shared subsequent | 1655 // Handle the two possible cases (unshared and shared subsequent |
| 1687 // predecessors) separately. | 1656 // predecessors) separately. |
| 1688 BlockEntryInstr* callee_entry = inlined_entries_[i]; | 1657 BlockEntryInstr* callee_entry = inlined_entries_[i]; |
| 1689 if (callee_entry->IsGraphEntry()) { | 1658 if (callee_entry->IsGraphEntry()) { |
| 1690 // Unshared. Graft the normal entry on after the check class | 1659 // Unshared. Graft the normal entry on after the check class |
| 1691 // instruction. | 1660 // instruction. |
| 1692 TargetEntryInstr* target = | 1661 TargetEntryInstr* target = callee_entry->AsGraphEntry()->normal_entry(); |
| 1693 callee_entry->AsGraphEntry()->normal_entry(); | |
| 1694 cursor->LinkTo(target->next()); | 1662 cursor->LinkTo(target->next()); |
| 1695 target->ReplaceAsPredecessorWith(current_block); | 1663 target->ReplaceAsPredecessorWith(current_block); |
| 1696 // Unuse all inputs of the graph entry and the normal entry. They are | 1664 // Unuse all inputs of the graph entry and the normal entry. They are |
| 1697 // not in the graph anymore. | 1665 // not in the graph anymore. |
| 1698 callee_entry->UnuseAllInputs(); | 1666 callee_entry->UnuseAllInputs(); |
| 1699 target->UnuseAllInputs(); | 1667 target->UnuseAllInputs(); |
| 1700 // All blocks that were dominated by the normal entry are now | 1668 // All blocks that were dominated by the normal entry are now |
| 1701 // dominated by the current block. | 1669 // dominated by the current block. |
| 1702 for (intptr_t j = 0; | 1670 for (intptr_t j = 0; j < target->dominated_blocks().length(); ++j) { |
| 1703 j < target->dominated_blocks().length(); | |
| 1704 ++j) { | |
| 1705 BlockEntryInstr* block = target->dominated_blocks()[j]; | 1671 BlockEntryInstr* block = target->dominated_blocks()[j]; |
| 1706 current_block->AddDominatedBlock(block); | 1672 current_block->AddDominatedBlock(block); |
| 1707 } | 1673 } |
| 1708 } else if (callee_entry->IsJoinEntry()) { | 1674 } else if (callee_entry->IsJoinEntry()) { |
| 1709 // Shared inlined body and this is a subsequent entry. We have | 1675 // Shared inlined body and this is a subsequent entry. We have |
| 1710 // already constructed a join and set its dominator. Add a jump to | 1676 // already constructed a join and set its dominator. Add a jump to |
| 1711 // the join. | 1677 // the join. |
| 1712 JoinEntryInstr* join = callee_entry->AsJoinEntry(); | 1678 JoinEntryInstr* join = callee_entry->AsJoinEntry(); |
| 1713 ASSERT(join->dominator() != NULL); | 1679 ASSERT(join->dominator() != NULL); |
| 1714 GotoInstr* goto_join = new GotoInstr(join); | 1680 GotoInstr* goto_join = new GotoInstr(join); |
| 1715 goto_join->InheritDeoptTarget(zone(), join); | 1681 goto_join->InheritDeoptTarget(zone(), join); |
| 1716 cursor->LinkTo(goto_join); | 1682 cursor->LinkTo(goto_join); |
| 1717 current_block->set_last_instruction(goto_join); | 1683 current_block->set_last_instruction(goto_join); |
| 1718 } else { | 1684 } else { |
| 1719 // There is no possibility of a TargetEntry (the first entry to a | 1685 // There is no possibility of a TargetEntry (the first entry to a |
| 1720 // shared inlined body) because this is the last inlined entry. | 1686 // shared inlined body) because this is the last inlined entry. |
| 1721 UNREACHABLE(); | 1687 UNREACHABLE(); |
| 1722 } | 1688 } |
| 1723 cursor = NULL; | 1689 cursor = NULL; |
| 1724 } else { | 1690 } else { |
| 1725 // For all variants except the last, use a branch on the loaded class | 1691 // For all variants except the last, use a branch on the loaded class |
| 1726 // id. | 1692 // id. |
| 1727 const Smi& cid = Smi::ZoneHandle(Smi::New(inlined_variants_[i].cid)); | 1693 const Smi& cid = Smi::ZoneHandle(Smi::New(inlined_variants_[i].cid)); |
| 1728 ConstantInstr* cid_constant = new ConstantInstr(cid); | 1694 ConstantInstr* cid_constant = new ConstantInstr(cid); |
| 1729 cid_constant->set_ssa_temp_index( | 1695 cid_constant->set_ssa_temp_index( |
| 1730 owner_->caller_graph()->alloc_ssa_temp_index()); | 1696 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1731 StrictCompareInstr* compare = | 1697 StrictCompareInstr* compare = new StrictCompareInstr( |
| 1732 new StrictCompareInstr(call_->instance_call()->token_pos(), | 1698 call_->instance_call()->token_pos(), Token::kEQ_STRICT, |
| 1733 Token::kEQ_STRICT, | 1699 new Value(load_cid), new Value(cid_constant), |
| 1734 new Value(load_cid), | 1700 false); // No number check. |
| 1735 new Value(cid_constant), | |
| 1736 false); // No number check. | |
| 1737 BranchInstr* branch = new BranchInstr(compare); | 1701 BranchInstr* branch = new BranchInstr(compare); |
| 1738 branch->InheritDeoptTarget(zone(), call_); | 1702 branch->InheritDeoptTarget(zone(), call_); |
| 1739 AppendInstruction(AppendInstruction(cursor, cid_constant), branch); | 1703 AppendInstruction(AppendInstruction(cursor, cid_constant), branch); |
| 1740 current_block->set_last_instruction(branch); | 1704 current_block->set_last_instruction(branch); |
| 1741 cursor = NULL; | 1705 cursor = NULL; |
| 1742 | 1706 |
| 1743 // 2. Handle a match by linking to the inlined body. There are three | 1707 // 2. Handle a match by linking to the inlined body. There are three |
| 1744 // cases (unshared, shared first predecessor, and shared subsequent | 1708 // cases (unshared, shared first predecessor, and shared subsequent |
| 1745 // predecessors). | 1709 // predecessors). |
| 1746 BlockEntryInstr* callee_entry = inlined_entries_[i]; | 1710 BlockEntryInstr* callee_entry = inlined_entries_[i]; |
| 1747 TargetEntryInstr* true_target = NULL; | 1711 TargetEntryInstr* true_target = NULL; |
| 1748 if (callee_entry->IsGraphEntry()) { | 1712 if (callee_entry->IsGraphEntry()) { |
| 1749 // Unshared. | 1713 // Unshared. |
| 1750 true_target = callee_entry->AsGraphEntry()->normal_entry(); | 1714 true_target = callee_entry->AsGraphEntry()->normal_entry(); |
| 1751 // Unuse all inputs of the graph entry. It is not in the graph anymore. | 1715 // Unuse all inputs of the graph entry. It is not in the graph anymore. |
| 1752 callee_entry->UnuseAllInputs(); | 1716 callee_entry->UnuseAllInputs(); |
| 1753 } else if (callee_entry->IsTargetEntry()) { | 1717 } else if (callee_entry->IsTargetEntry()) { |
| 1754 // Shared inlined body and this is the first entry. We have already | 1718 // Shared inlined body and this is the first entry. We have already |
| 1755 // constructed a join and this target jumps to it. | 1719 // constructed a join and this target jumps to it. |
| 1756 true_target = callee_entry->AsTargetEntry(); | 1720 true_target = callee_entry->AsTargetEntry(); |
| 1757 BlockEntryInstr* join = | 1721 BlockEntryInstr* join = true_target->last_instruction()->SuccessorAt(0); |
| 1758 true_target->last_instruction()->SuccessorAt(0); | |
| 1759 current_block->AddDominatedBlock(join); | 1722 current_block->AddDominatedBlock(join); |
| 1760 } else { | 1723 } else { |
| 1761 // Shared inlined body and this is a subsequent entry. We have | 1724 // Shared inlined body and this is a subsequent entry. We have |
| 1762 // already constructed a join. We need a fresh target that jumps to | 1725 // already constructed a join. We need a fresh target that jumps to |
| 1763 // the join. | 1726 // the join. |
| 1764 JoinEntryInstr* join = callee_entry->AsJoinEntry(); | 1727 JoinEntryInstr* join = callee_entry->AsJoinEntry(); |
| 1765 ASSERT(join != NULL); | 1728 ASSERT(join != NULL); |
| 1766 ASSERT(join->dominator() != NULL); | 1729 ASSERT(join->dominator() != NULL); |
| 1767 true_target = | 1730 true_target = |
| 1768 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), | 1731 new TargetEntryInstr(owner_->caller_graph()->allocate_block_id(), |
| (...skipping 23 matching lines...) |
| 1792 if (!non_inlined_variants_.is_empty()) { | 1755 if (!non_inlined_variants_.is_empty()) { |
| 1793 // Move push arguments of the call. | 1756 // Move push arguments of the call. |
| 1794 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { | 1757 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { |
| 1795 PushArgumentInstr* push = call_->PushArgumentAt(i); | 1758 PushArgumentInstr* push = call_->PushArgumentAt(i); |
| 1796 push->ReplaceUsesWith(push->value()->definition()); | 1759 push->ReplaceUsesWith(push->value()->definition()); |
| 1797 push->previous()->LinkTo(push->next()); | 1760 push->previous()->LinkTo(push->next()); |
| 1798 cursor->LinkTo(push); | 1761 cursor->LinkTo(push); |
| 1799 cursor = push; | 1762 cursor = push; |
| 1800 } | 1763 } |
| 1801 const ICData& old_checks = call_->ic_data(); | 1764 const ICData& old_checks = call_->ic_data(); |
| 1802 const ICData& new_checks = ICData::ZoneHandle( | 1765 const ICData& new_checks = ICData::ZoneHandle(ICData::New( |
| 1803 ICData::New(Function::Handle(old_checks.Owner()), | 1766 Function::Handle(old_checks.Owner()), |
| 1804 String::Handle(old_checks.target_name()), | 1767 String::Handle(old_checks.target_name()), |
| 1805 Array::Handle(old_checks.arguments_descriptor()), | 1768 Array::Handle(old_checks.arguments_descriptor()), old_checks.deopt_id(), |
| 1806 old_checks.deopt_id(), | 1769 1, // Number of args tested. |
| 1807 1, // Number of args tested. | 1770 false)); // is_static_call |
| 1808 false)); // is_static_call | |
| 1809 for (intptr_t i = 0; i < non_inlined_variants_.length(); ++i) { | 1771 for (intptr_t i = 0; i < non_inlined_variants_.length(); ++i) { |
| 1810 new_checks.AddReceiverCheck(non_inlined_variants_[i].cid, | 1772 new_checks.AddReceiverCheck(non_inlined_variants_[i].cid, |
| 1811 *non_inlined_variants_[i].target, | 1773 *non_inlined_variants_[i].target, |
| 1812 non_inlined_variants_[i].count); | 1774 non_inlined_variants_[i].count); |
| 1813 } | 1775 } |
| 1814 PolymorphicInstanceCallInstr* fallback_call = | 1776 PolymorphicInstanceCallInstr* fallback_call = |
| 1815 new PolymorphicInstanceCallInstr(call_->instance_call(), | 1777 new PolymorphicInstanceCallInstr(call_->instance_call(), new_checks, |
| 1816 new_checks, | |
| 1817 /* with_checks = */ true, | 1778 /* with_checks = */ true, |
| 1818 call_->complete()); | 1779 call_->complete()); |
| 1819 fallback_call->set_ssa_temp_index( | 1780 fallback_call->set_ssa_temp_index( |
| 1820 owner_->caller_graph()->alloc_ssa_temp_index()); | 1781 owner_->caller_graph()->alloc_ssa_temp_index()); |
| 1821 fallback_call->InheritDeoptTarget(zone(), call_); | 1782 fallback_call->InheritDeoptTarget(zone(), call_); |
| 1822 ReturnInstr* fallback_return = | 1783 ReturnInstr* fallback_return = new ReturnInstr( |
| 1823 new ReturnInstr(call_->instance_call()->token_pos(), | 1784 call_->instance_call()->token_pos(), new Value(fallback_call)); |
| 1824 new Value(fallback_call)); | 1785 fallback_return->InheritDeoptTargetAfter(owner_->caller_graph(), call_, |
| 1825 fallback_return->InheritDeoptTargetAfter( | 1786 fallback_call); |
| 1826 owner_->caller_graph(), | |
| 1827 call_, | |
| 1828 fallback_call); | |
| 1829 AppendInstruction(AppendInstruction(cursor, fallback_call), | 1787 AppendInstruction(AppendInstruction(cursor, fallback_call), |
| 1830 fallback_return); | 1788 fallback_return); |
| 1831 exit_collector_->AddExit(fallback_return); | 1789 exit_collector_->AddExit(fallback_return); |
| 1832 cursor = NULL; | 1790 cursor = NULL; |
| 1833 } else { | 1791 } else { |
| 1834 // Remove push arguments of the call. | 1792 // Remove push arguments of the call. |
| 1835 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { | 1793 for (intptr_t i = 0; i < call_->ArgumentCount(); ++i) { |
| 1836 PushArgumentInstr* push = call_->PushArgumentAt(i); | 1794 PushArgumentInstr* push = call_->PushArgumentAt(i); |
| 1837 push->ReplaceUsesWith(push->value()->definition()); | 1795 push->ReplaceUsesWith(push->value()->definition()); |
| 1838 push->RemoveFromGraph(); | 1796 push->RemoveFromGraph(); |
| 1839 } | 1797 } |
| 1840 } | 1798 } |
| 1841 return entry; | 1799 return entry; |
| 1842 } | 1800 } |
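A minimal standalone sketch of the shape of the dispatch built above: the receiver's class id is tested against each inlined variant in order and control jumps to that inlined body, while any class that was not inlined reaches a residual polymorphic call carrying only the remaining checks. This is plain C++ with assumed names (InlinedVariant, Dispatch, the std::function stand-ins); it is not the VM's IL or ICData API.

    #include <cstdint>
    #include <functional>
    #include <vector>

    struct InlinedVariant {
      int64_t cid;                    // Receiver class id handled by this body.
      std::function<int64_t()> body;  // Stands in for the inlined callee body.
    };

    int64_t Dispatch(int64_t receiver_cid,
                     const std::vector<InlinedVariant>& inlined_variants,
                     const std::function<int64_t()>& fallback_call) {
      for (const InlinedVariant& variant : inlined_variants) {
        if (receiver_cid == variant.cid) {
          return variant.body();  // "Branch to the inlined body."
        }
      }
      // No match: the residual polymorphic call built from the non-inlined
      // variants (or a deoptimization when nothing is left to call).
      return fallback_call();
    }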
| 1843 | 1801 |
| 1844 | 1802 |
| 1845 void PolymorphicInliner::Inline() { | 1803 void PolymorphicInliner::Inline() { |
| 1846 // Consider the polymorphic variants in order by frequency. | 1804 // Consider the polymorphic variants in order by frequency. |
| 1847 FlowGraphCompiler::SortICDataByCount(call_->ic_data(), | 1805 FlowGraphCompiler::SortICDataByCount(call_->ic_data(), &variants_, |
| 1848 &variants_, | |
| 1849 /* drop_smi = */ false); | 1806 /* drop_smi = */ false); |
| 1850 for (intptr_t var_idx = 0; var_idx < variants_.length(); ++var_idx) { | 1807 for (intptr_t var_idx = 0; var_idx < variants_.length(); ++var_idx) { |
| 1851 const Function& target = *variants_[var_idx].target; | 1808 const Function& target = *variants_[var_idx].target; |
| 1852 const intptr_t receiver_cid = variants_[var_idx].cid; | 1809 const intptr_t receiver_cid = variants_[var_idx].cid; |
| 1853 | 1810 |
| 1854 // First check if this is the same target as an earlier inlined variant. | 1811 // First check if this is the same target as an earlier inlined variant. |
| 1855 if (CheckInlinedDuplicate(target)) { | 1812 if (CheckInlinedDuplicate(target)) { |
| 1856 inlined_variants_.Add(variants_[var_idx]); | 1813 inlined_variants_.Add(variants_[var_idx]); |
| 1857 continue; | 1814 continue; |
| 1858 } | 1815 } |
| (...skipping 69 matching lines...) |
| 1928 | 1885 |
| 1929 | 1886 |
| 1930 // TODO(srdjan): This is only needed when disassembling and/or profiling. | 1887 // TODO(srdjan): This is only needed when disassembling and/or profiling. |
| 1931 // Sets inlining id for all instructions of this flow-graph, as well as for the | 1888 // Sets inlining id for all instructions of this flow-graph, as well as for the |
| 1932 // FlowGraph itself. | 1889 // FlowGraph itself. |
| 1933 void FlowGraphInliner::SetInliningId(FlowGraph* flow_graph, | 1890 void FlowGraphInliner::SetInliningId(FlowGraph* flow_graph, |
| 1934 intptr_t inlining_id) { | 1891 intptr_t inlining_id) { |
| 1935 ASSERT(flow_graph->inlining_id() < 0); | 1892 ASSERT(flow_graph->inlining_id() < 0); |
| 1936 flow_graph->set_inlining_id(inlining_id); | 1893 flow_graph->set_inlining_id(inlining_id); |
| 1937 for (BlockIterator block_it = flow_graph->postorder_iterator(); | 1894 for (BlockIterator block_it = flow_graph->postorder_iterator(); |
| 1938 !block_it.Done(); | 1895 !block_it.Done(); block_it.Advance()) { |
| 1939 block_it.Advance()) { | 1896 for (ForwardInstructionIterator it(block_it.Current()); !it.Done(); |
| 1940 for (ForwardInstructionIterator it(block_it.Current()); | |
| 1941 !it.Done(); | |
| 1942 it.Advance()) { | 1897 it.Advance()) { |
| 1943 Instruction* current = it.Current(); | 1898 Instruction* current = it.Current(); |
| 1944 // Do not overwrite owner function. | 1899 // Do not overwrite owner function. |
| 1945 ASSERT(!current->has_inlining_id()); | 1900 ASSERT(!current->has_inlining_id()); |
| 1946 current->set_inlining_id(inlining_id); | 1901 current->set_inlining_id(inlining_id); |
| 1947 } | 1902 } |
| 1948 } | 1903 } |
| 1949 } | 1904 } |
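A minimal standalone analogue of the traversal in FlowGraphInliner::SetInliningId above: walk every block, stamp every instruction with the id of the inlined function it came from, and require that nothing is stamped twice. Block and Instr here are illustrative stand-ins, not the VM's BlockEntryInstr/Instruction classes.

    #include <cassert>
    #include <vector>

    struct Instr {
      int inlining_id = -1;  // -1 means "no inlining id assigned yet".
    };

    struct Block {
      std::vector<Instr> instructions;
    };

    void SetInliningId(std::vector<Block>& postorder, int inlining_id) {
      for (Block& block : postorder) {
        for (Instr& instr : block.instructions) {
          assert(instr.inlining_id == -1);  // Do not overwrite the owner.
          instr.inlining_id = inlining_id;
        }
      }
    }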
| 1950 | 1905 |
| 1951 | 1906 |
| 1952 // Use function name to determine if inlineable operator. | 1907 // Use function name to determine if inlineable operator. |
| 1953 // Add names as necessary. | 1908 // Add names as necessary. |
| 1954 static bool IsInlineableOperator(const Function& function) { | 1909 static bool IsInlineableOperator(const Function& function) { |
| 1955 return (function.name() == Symbols::IndexToken().raw()) || | 1910 return (function.name() == Symbols::IndexToken().raw()) || |
| 1956 (function.name() == Symbols::AssignIndexToken().raw()) || | 1911 (function.name() == Symbols::AssignIndexToken().raw()) || |
| 1957 (function.name() == Symbols::Plus().raw()) || | 1912 (function.name() == Symbols::Plus().raw()) || |
| 1958 (function.name() == Symbols::Minus().raw()); | 1913 (function.name() == Symbols::Minus().raw()); |
| 1959 } | 1914 } |
| 1960 | 1915 |
| 1961 | 1916 |
| 1962 bool FlowGraphInliner::AlwaysInline(const Function& function) { | 1917 bool FlowGraphInliner::AlwaysInline(const Function& function) { |
| 1963 const char* kAlwaysInlineAnnotation = "AlwaysInline"; | 1918 const char* kAlwaysInlineAnnotation = "AlwaysInline"; |
| 1964 if (FLAG_enable_inlining_annotations && | 1919 if (FLAG_enable_inlining_annotations && |
| 1965 HasAnnotation(function, kAlwaysInlineAnnotation)) { | 1920 HasAnnotation(function, kAlwaysInlineAnnotation)) { |
| 1966 TRACE_INLINING(THR_Print("AlwaysInline annotation for %s\n", | 1921 TRACE_INLINING( |
| 1967 function.ToCString())); | 1922 THR_Print("AlwaysInline annotation for %s\n", function.ToCString())); |
| 1968 return true; | 1923 return true; |
| 1969 } | 1924 } |
| 1970 | 1925 |
| 1971 if (function.IsImplicitGetterFunction() || function.IsGetterFunction() || | 1926 if (function.IsImplicitGetterFunction() || function.IsGetterFunction() || |
| 1972 function.IsImplicitSetterFunction() || function.IsSetterFunction() || | 1927 function.IsImplicitSetterFunction() || function.IsSetterFunction() || |
| 1973 IsInlineableOperator(function) || | 1928 IsInlineableOperator(function) || |
| 1974 (function.kind() == RawFunction::kConstructor)) { | 1929 (function.kind() == RawFunction::kConstructor)) { |
| 1975 const intptr_t count = function.optimized_instruction_count(); | 1930 const intptr_t count = function.optimized_instruction_count(); |
| 1976 if ((count != 0) && (count < FLAG_inline_getters_setters_smaller_than)) { | 1931 if ((count != 0) && (count < FLAG_inline_getters_setters_smaller_than)) { |
| 1977 return true; | 1932 return true; |
| 1978 } | 1933 } |
| 1979 } | 1934 } |
| 1980 return MethodRecognizer::AlwaysInline(function); | 1935 return MethodRecognizer::AlwaysInline(function); |
| 1981 } | 1936 } |
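A hedged summary of the decision in FlowGraphInliner::AlwaysInline above, written as a standalone predicate. The struct fields and parameter names are assumptions made for illustration, not VM API; the real code finally defers to MethodRecognizer::AlwaysInline.

    struct FunctionSummary {
      bool has_always_inline_annotation;
      bool is_getter_setter_operator_or_constructor;
      int optimized_instruction_count;  // 0 when the size is not yet known.
    };

    bool AlwaysInline(const FunctionSummary& f, int small_accessor_threshold) {
      if (f.has_always_inline_annotation) return true;
      if (f.is_getter_setter_operator_or_constructor &&
          f.optimized_instruction_count != 0 &&
          f.optimized_instruction_count < small_accessor_threshold) {
        return true;
      }
      // The real code additionally asks the method recognizer.
      return false;
    }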
| 1982 | 1937 |
| 1983 | 1938 |
| 1984 void FlowGraphInliner::Inline() { | 1939 void FlowGraphInliner::Inline() { |
| 1985 // Collect graph info and store it on the function. | 1940 // Collect graph info and store it on the function. |
| 1986 // We might later use it for an early bailout from the inlining. | 1941 // We might later use it for an early bailout from the inlining. |
| 1987 CollectGraphInfo(flow_graph_); | 1942 CollectGraphInfo(flow_graph_); |
| 1988 | 1943 |
| 1989 const Function& top = flow_graph_->function(); | 1944 const Function& top = flow_graph_->function(); |
| 1990 if ((FLAG_inlining_filter != NULL) && | 1945 if ((FLAG_inlining_filter != NULL) && |
| 1991 (strstr(top.ToFullyQualifiedCString(), FLAG_inlining_filter) == NULL)) { | 1946 (strstr(top.ToFullyQualifiedCString(), FLAG_inlining_filter) == NULL)) { |
| 1992 return; | 1947 return; |
| 1993 } | 1948 } |
| 1994 | 1949 |
| 1995 TRACE_INLINING(THR_Print("Inlining calls in %s\n", top.ToCString())); | 1950 TRACE_INLINING(THR_Print("Inlining calls in %s\n", top.ToCString())); |
| 1996 | 1951 |
| 1997 if (FLAG_support_il_printer && | 1952 if (FLAG_support_il_printer && trace_inlining() && |
| 1998 trace_inlining() && | |
| 1999 (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) { | 1953 (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) { |
| 2000 THR_Print("Before Inlining of %s\n", flow_graph_-> | 1954 THR_Print("Before Inlining of %s\n", |
| 2001 function().ToFullyQualifiedCString()); | 1955 flow_graph_->function().ToFullyQualifiedCString()); |
| 2002 FlowGraphPrinter printer(*flow_graph_); | 1956 FlowGraphPrinter printer(*flow_graph_); |
| 2003 printer.PrintBlocks(); | 1957 printer.PrintBlocks(); |
| 2004 } | 1958 } |
| 2005 | 1959 |
| 2006 CallSiteInliner inliner(this); | 1960 CallSiteInliner inliner(this); |
| 2007 inliner.InlineCalls(); | 1961 inliner.InlineCalls(); |
| 2008 if (FLAG_print_inlining_tree) { | 1962 if (FLAG_print_inlining_tree) { |
| 2009 inliner.PrintInlinedInfo(top); | 1963 inliner.PrintInlinedInfo(top); |
| 2010 } | 1964 } |
| 2011 | 1965 |
| 2012 if (inliner.inlined()) { | 1966 if (inliner.inlined()) { |
| 2013 flow_graph_->DiscoverBlocks(); | 1967 flow_graph_->DiscoverBlocks(); |
| 2014 if (trace_inlining()) { | 1968 if (trace_inlining()) { |
| 2015 THR_Print("Inlining growth factor: %f\n", inliner.GrowthFactor()); | 1969 THR_Print("Inlining growth factor: %f\n", inliner.GrowthFactor()); |
| 2016 if (FLAG_support_il_printer && | 1970 if (FLAG_support_il_printer && |
| 2017 (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) { | 1971 (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) { |
| 2018 THR_Print("After Inlining of %s\n", flow_graph_-> | 1972 THR_Print("After Inlining of %s\n", |
| 2019 function().ToFullyQualifiedCString()); | 1973 flow_graph_->function().ToFullyQualifiedCString()); |
| 2020 FlowGraphPrinter printer(*flow_graph_); | 1974 FlowGraphPrinter printer(*flow_graph_); |
| 2021 printer.PrintBlocks(); | 1975 printer.PrintBlocks(); |
| 2022 } | 1976 } |
| 2023 } | 1977 } |
| 2024 } | 1978 } |
| 2025 } | 1979 } |
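A short sketch of the --inlining_filter check at the top of FlowGraphInliner::Inline above. Only the strstr-based substring match is taken from the code; the helper name is an assumption.

    #include <cstring>

    bool MatchesInliningFilter(const char* fully_qualified_name,
                               const char* inlining_filter) {
      // A null filter means "inline everywhere"; otherwise the function's fully
      // qualified name must contain the filter as a substring.
      return (inlining_filter == nullptr) ||
             (std::strstr(fully_qualified_name, inlining_filter) != nullptr);
    }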
| 2026 | 1980 |
| 2027 | 1981 |
| 2028 intptr_t FlowGraphInliner::NextInlineId(const Function& function, | 1982 intptr_t FlowGraphInliner::NextInlineId(const Function& function, |
| 2029 TokenPosition tp, | 1983 TokenPosition tp, |
| (...skipping 42 matching lines...) |
| 2072 #undef Z | 2026 #undef Z |
| 2073 #define Z (flow_graph->zone()) | 2027 #define Z (flow_graph->zone()) |
| 2074 | 2028 |
| 2075 static intptr_t PrepareInlineIndexedOp(FlowGraph* flow_graph, | 2029 static intptr_t PrepareInlineIndexedOp(FlowGraph* flow_graph, |
| 2076 Instruction* call, | 2030 Instruction* call, |
| 2077 intptr_t array_cid, | 2031 intptr_t array_cid, |
| 2078 Definition** array, | 2032 Definition** array, |
| 2079 Definition* index, | 2033 Definition* index, |
| 2080 Instruction** cursor) { | 2034 Instruction** cursor) { |
| 2081 // Insert array length load and bounds check. | 2035 // Insert array length load and bounds check. |
| 2082 LoadFieldInstr* length = | 2036 LoadFieldInstr* length = new (Z) LoadFieldInstr( |
| 2083 new(Z) LoadFieldInstr( | 2037 new (Z) Value(*array), CheckArrayBoundInstr::LengthOffsetFor(array_cid), |
| 2084 new(Z) Value(*array), | 2038 Type::ZoneHandle(Z, Type::SmiType()), call->token_pos()); |
| 2085 CheckArrayBoundInstr::LengthOffsetFor(array_cid), | |
| 2086 Type::ZoneHandle(Z, Type::SmiType()), | |
| 2087 call->token_pos()); | |
| 2088 length->set_is_immutable( | 2039 length->set_is_immutable( |
| 2089 CheckArrayBoundInstr::IsFixedLengthArrayType(array_cid)); | 2040 CheckArrayBoundInstr::IsFixedLengthArrayType(array_cid)); |
| 2090 length->set_result_cid(kSmiCid); | 2041 length->set_result_cid(kSmiCid); |
| 2091 length->set_recognized_kind( | 2042 length->set_recognized_kind( |
| 2092 LoadFieldInstr::RecognizedKindFromArrayCid(array_cid)); | 2043 LoadFieldInstr::RecognizedKindFromArrayCid(array_cid)); |
| 2093 *cursor = flow_graph->AppendTo(*cursor, | 2044 *cursor = flow_graph->AppendTo(*cursor, length, NULL, FlowGraph::kValue); |
| 2094 length, | |
| 2095 NULL, | |
| 2096 FlowGraph::kValue); | |
| 2097 | 2045 |
| 2098 *cursor = flow_graph->AppendTo(*cursor, | 2046 *cursor = flow_graph->AppendTo( |
| 2099 new(Z) CheckArrayBoundInstr( | 2047 *cursor, |
| 2100 new(Z) Value(length), | 2048 new (Z) CheckArrayBoundInstr(new (Z) Value(length), new (Z) Value(index), |
| 2101 new(Z) Value(index), | 2049 call->deopt_id()), |
| 2102 call->deopt_id()), | 2050 call->env(), FlowGraph::kEffect); |
| 2103 call->env(), | |
| 2104 FlowGraph::kEffect); | |
| 2105 | 2051 |
| 2106 if (array_cid == kGrowableObjectArrayCid) { | 2052 if (array_cid == kGrowableObjectArrayCid) { |
| 2107 // Insert data elements load. | 2053 // Insert data elements load. |
| 2108 LoadFieldInstr* elements = | 2054 LoadFieldInstr* elements = new (Z) LoadFieldInstr( |
| 2109 new(Z) LoadFieldInstr( | 2055 new (Z) Value(*array), GrowableObjectArray::data_offset(), |
| 2110 new(Z) Value(*array), | 2056 Object::dynamic_type(), call->token_pos()); |
| 2111 GrowableObjectArray::data_offset(), | |
| 2112 Object::dynamic_type(), | |
| 2113 call->token_pos()); | |
| 2114 elements->set_result_cid(kArrayCid); | 2057 elements->set_result_cid(kArrayCid); |
| 2115 *cursor = flow_graph->AppendTo(*cursor, | 2058 *cursor = flow_graph->AppendTo(*cursor, elements, NULL, FlowGraph::kValue); |
| 2116 elements, | |
| 2117 NULL, | |
| 2118 FlowGraph::kValue); | |
| 2119 // Load from the data backing store, which is a fixed-length array. | 2059 // Load from the data backing store, which is a fixed-length array. |
| 2120 *array = elements; | 2060 *array = elements; |
| 2121 array_cid = kArrayCid; | 2061 array_cid = kArrayCid; |
| 2122 } else if (RawObject::IsExternalTypedDataClassId(array_cid)) { | 2062 } else if (RawObject::IsExternalTypedDataClassId(array_cid)) { |
| 2123 LoadUntaggedInstr* elements = | 2063 LoadUntaggedInstr* elements = new (Z) LoadUntaggedInstr( |
| 2124 new(Z) LoadUntaggedInstr(new(Z) Value(*array), | 2064 new (Z) Value(*array), ExternalTypedData::data_offset()); |
| 2125 ExternalTypedData::data_offset()); | 2065 *cursor = flow_graph->AppendTo(*cursor, elements, NULL, FlowGraph::kValue); |
| 2126 *cursor = flow_graph->AppendTo(*cursor, | |
| 2127 elements, | |
| 2128 NULL, | |
| 2129 FlowGraph::kValue); | |
| 2130 *array = elements; | 2066 *array = elements; |
| 2131 } | 2067 } |
| 2132 return array_cid; | 2068 return array_cid; |
| 2133 } | 2069 } |
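A standalone sketch of the cursor idiom used by PrepareInlineIndexedOp above: each helper appends new instructions after *cursor and leaves *cursor pointing at the last one, so the caller can keep chaining the actual load or store behind it. The types and opcode strings are assumptions for illustration, not the VM's IL classes.

    #include <memory>
    #include <string>
    #include <vector>

    struct Instr {
      std::string op;
      Instr* next = nullptr;
    };

    // Appends a new instruction after 'cursor' and returns it as the new cursor.
    Instr* AppendTo(std::vector<std::unique_ptr<Instr>>* arena,
                    Instr* cursor,
                    const std::string& op) {
      arena->push_back(std::make_unique<Instr>());
      Instr* added = arena->back().get();
      added->op = op;
      cursor->next = added;
      return added;
    }

    void PrepareIndexedOp(std::vector<std::unique_ptr<Instr>>* arena,
                          Instr** cursor,
                          bool is_growable_array) {
      *cursor = AppendTo(arena, *cursor, "LoadField length");
      *cursor = AppendTo(arena, *cursor, "CheckArrayBound length, index");
      if (is_growable_array) {
        // Growable arrays keep their elements in a separate fixed-length
        // backing array, so load that and index into it instead.
        *cursor = AppendTo(arena, *cursor, "LoadField data");
      }
    }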
| 2134 | 2070 |
| 2135 | 2071 |
| 2136 static Instruction* GetCheckClass(FlowGraph* flow_graph, | 2072 static Instruction* GetCheckClass(FlowGraph* flow_graph, |
| 2137 Definition* to_check, | 2073 Definition* to_check, |
| 2138 const ICData& unary_checks, | 2074 const ICData& unary_checks, |
| 2139 intptr_t deopt_id, | 2075 intptr_t deopt_id, |
| 2140 TokenPosition token_pos) { | 2076 TokenPosition token_pos) { |
| 2141 if ((unary_checks.NumberOfUsedChecks() == 1) && | 2077 if ((unary_checks.NumberOfUsedChecks() == 1) && |
| 2142 unary_checks.HasReceiverClassId(kSmiCid)) { | 2078 unary_checks.HasReceiverClassId(kSmiCid)) { |
| 2143 return new(Z) CheckSmiInstr(new(Z) Value(to_check), | 2079 return new (Z) CheckSmiInstr(new (Z) Value(to_check), deopt_id, token_pos); |
| 2144 deopt_id, | |
| 2145 token_pos); | |
| 2146 } | 2080 } |
| 2147 return new(Z) CheckClassInstr( | 2081 return new (Z) CheckClassInstr(new (Z) Value(to_check), deopt_id, |
| 2148 new(Z) Value(to_check), deopt_id, unary_checks, token_pos); | 2082 unary_checks, token_pos); |
| 2149 } | 2083 } |
| 2150 | 2084 |
| 2151 | 2085 |
| 2152 static bool InlineGetIndexed(FlowGraph* flow_graph, | 2086 static bool InlineGetIndexed(FlowGraph* flow_graph, |
| 2153 MethodRecognizer::Kind kind, | 2087 MethodRecognizer::Kind kind, |
| 2154 Instruction* call, | 2088 Instruction* call, |
| 2155 Definition* receiver, | 2089 Definition* receiver, |
| 2156 TargetEntryInstr** entry, | 2090 TargetEntryInstr** entry, |
| 2157 Definition** last) { | 2091 Definition** last) { |
| 2158 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind); | 2092 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind); |
| 2159 | 2093 |
| 2160 Definition* array = receiver; | 2094 Definition* array = receiver; |
| 2161 Definition* index = call->ArgumentAt(1); | 2095 Definition* index = call->ArgumentAt(1); |
| 2162 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2096 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2163 call->GetBlock()->try_index()); | 2097 call->GetBlock()->try_index()); |
| 2164 (*entry)->InheritDeoptTarget(Z, call); | 2098 (*entry)->InheritDeoptTarget(Z, call); |
| 2165 Instruction* cursor = *entry; | 2099 Instruction* cursor = *entry; |
| 2166 | 2100 |
| 2167 array_cid = PrepareInlineIndexedOp(flow_graph, | 2101 array_cid = PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, index, |
| 2168 call, | |
| 2169 array_cid, | |
| 2170 &array, | |
| 2171 index, | |
| 2172 &cursor); | 2102 &cursor); |
| 2173 | 2103 |
| 2174 intptr_t deopt_id = Thread::kNoDeoptId; | 2104 intptr_t deopt_id = Thread::kNoDeoptId; |
| 2175 if ((array_cid == kTypedDataInt32ArrayCid) || | 2105 if ((array_cid == kTypedDataInt32ArrayCid) || |
| 2176 (array_cid == kTypedDataUint32ArrayCid)) { | 2106 (array_cid == kTypedDataUint32ArrayCid)) { |
| 2177 // Deoptimization may be needed if result does not always fit in a Smi. | 2107 // Deoptimization may be needed if result does not always fit in a Smi. |
| 2178 deopt_id = (kSmiBits >= 32) ? Thread::kNoDeoptId : call->deopt_id(); | 2108 deopt_id = (kSmiBits >= 32) ? Thread::kNoDeoptId : call->deopt_id(); |
| 2179 } | 2109 } |
| 2180 | 2110 |
| 2181 // Array load and return. | 2111 // Array load and return. |
| 2182 intptr_t index_scale = Instance::ElementSizeFor(array_cid); | 2112 intptr_t index_scale = Instance::ElementSizeFor(array_cid); |
| 2183 *last = new(Z) LoadIndexedInstr(new(Z) Value(array), | 2113 *last = new (Z) |
| 2184 new(Z) Value(index), | 2114 LoadIndexedInstr(new (Z) Value(array), new (Z) Value(index), index_scale, |
| 2185 index_scale, | 2115 array_cid, kAlignedAccess, deopt_id, call->token_pos()); |
| 2186 array_cid, | |
| 2187 kAlignedAccess, | |
| 2188 deopt_id, | |
| 2189 call->token_pos()); | |
| 2190 cursor = flow_graph->AppendTo( | 2116 cursor = flow_graph->AppendTo( |
| 2191 cursor, | 2117 cursor, *last, deopt_id != Thread::kNoDeoptId ? call->env() : NULL, |
| 2192 *last, | |
| 2193 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, | |
| 2194 FlowGraph::kValue); | 2118 FlowGraph::kValue); |
| 2195 | 2119 |
| 2196 if (array_cid == kTypedDataFloat32ArrayCid) { | 2120 if (array_cid == kTypedDataFloat32ArrayCid) { |
| 2197 *last = new(Z) FloatToDoubleInstr(new(Z) Value(*last), deopt_id); | 2121 *last = new (Z) FloatToDoubleInstr(new (Z) Value(*last), deopt_id); |
| 2198 flow_graph->AppendTo(cursor, | 2122 flow_graph->AppendTo(cursor, *last, |
| 2199 *last, | |
| 2200 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, | 2123 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, |
| 2201 FlowGraph::kValue); | 2124 FlowGraph::kValue); |
| 2202 } | 2125 } |
| 2203 return true; | 2126 return true; |
| 2204 } | 2127 } |
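A sketch of the reasoning behind the deopt_id choice in InlineGetIndexed above: a 32-bit element always fits in a Smi on 64-bit targets, but can overflow the Smi range on 32-bit targets, so only there does the indexed load keep the call's deopt id. The kSmiBits value below is an assumed illustrative constant, not the VM's definition.

    #include <cstdint>

    constexpr int kSmiBits = (sizeof(void*) == 8) ? 62 : 30;

    constexpr bool FitsInSmi(int64_t value) {
      return value >= -(int64_t{1} << kSmiBits) &&
             value < (int64_t{1} << kSmiBits);
    }

    constexpr bool Uint32LoadMayNeedDeopt() {
      // Mirrors: deopt_id = (kSmiBits >= 32) ? kNoDeoptId : call->deopt_id();
      return kSmiBits < 32;
    }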
| 2205 | 2128 |
| 2206 | 2129 |
| 2207 static bool InlineSetIndexed(FlowGraph* flow_graph, | 2130 static bool InlineSetIndexed(FlowGraph* flow_graph, |
| 2208 MethodRecognizer::Kind kind, | 2131 MethodRecognizer::Kind kind, |
| 2209 const Function& target, | 2132 const Function& target, |
| 2210 Instruction* call, | 2133 Instruction* call, |
| 2211 Definition* receiver, | 2134 Definition* receiver, |
| 2212 TokenPosition token_pos, | 2135 TokenPosition token_pos, |
| 2213 const ICData& value_check, | 2136 const ICData& value_check, |
| 2214 TargetEntryInstr** entry, | 2137 TargetEntryInstr** entry, |
| 2215 Definition** last) { | 2138 Definition** last) { |
| 2216 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind); | 2139 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind); |
| 2217 | 2140 |
| 2218 Definition* array = receiver; | 2141 Definition* array = receiver; |
| 2219 Definition* index = call->ArgumentAt(1); | 2142 Definition* index = call->ArgumentAt(1); |
| 2220 Definition* stored_value = call->ArgumentAt(2); | 2143 Definition* stored_value = call->ArgumentAt(2); |
| 2221 | 2144 |
| 2222 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2145 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2223 call->GetBlock()->try_index()); | 2146 call->GetBlock()->try_index()); |
| 2224 (*entry)->InheritDeoptTarget(Z, call); | 2147 (*entry)->InheritDeoptTarget(Z, call); |
| 2225 Instruction* cursor = *entry; | 2148 Instruction* cursor = *entry; |
| 2226 if (flow_graph->isolate()->type_checks()) { | 2149 if (flow_graph->isolate()->type_checks()) { |
| 2227 // Only type check for the value. A type check for the index is not | 2150 // Only type check for the value. A type check for the index is not |
| 2228 // needed here because we insert a deoptimizing smi-check for the case | 2151 // needed here because we insert a deoptimizing smi-check for the case |
| 2229 // the index is not a smi. | 2152 // the index is not a smi. |
| 2230 const AbstractType& value_type = | 2153 const AbstractType& value_type = |
| 2231 AbstractType::ZoneHandle(Z, target.ParameterTypeAt(2)); | 2154 AbstractType::ZoneHandle(Z, target.ParameterTypeAt(2)); |
| 2232 Definition* type_args = NULL; | 2155 Definition* type_args = NULL; |
| 2233 switch (array_cid) { | 2156 switch (array_cid) { |
| 2234 case kArrayCid: | 2157 case kArrayCid: |
| 2235 case kGrowableObjectArrayCid: { | 2158 case kGrowableObjectArrayCid: { |
| 2236 const Class& instantiator_class = Class::Handle(Z, target.Owner()); | 2159 const Class& instantiator_class = Class::Handle(Z, target.Owner()); |
| 2237 intptr_t type_arguments_field_offset = | 2160 intptr_t type_arguments_field_offset = |
| 2238 instantiator_class.type_arguments_field_offset(); | 2161 instantiator_class.type_arguments_field_offset(); |
| 2239 LoadFieldInstr* load_type_args = | 2162 LoadFieldInstr* load_type_args = new (Z) |
| 2240 new(Z) LoadFieldInstr(new(Z) Value(array), | 2163 LoadFieldInstr(new (Z) Value(array), type_arguments_field_offset, |
| 2241 type_arguments_field_offset, | 2164 Type::ZoneHandle(Z), // No type. |
| 2242 Type::ZoneHandle(Z), // No type. | 2165 call->token_pos()); |
| 2243 call->token_pos()); | 2166 cursor = flow_graph->AppendTo(cursor, load_type_args, NULL, |
| 2244 cursor = flow_graph->AppendTo(cursor, | |
| 2245 load_type_args, | |
| 2246 NULL, | |
| 2247 FlowGraph::kValue); | 2167 FlowGraph::kValue); |
| 2248 | 2168 |
| 2249 type_args = load_type_args; | 2169 type_args = load_type_args; |
| 2250 break; | 2170 break; |
| 2251 } | 2171 } |
| 2252 case kTypedDataInt8ArrayCid: | 2172 case kTypedDataInt8ArrayCid: |
| 2253 case kTypedDataUint8ArrayCid: | 2173 case kTypedDataUint8ArrayCid: |
| 2254 case kTypedDataUint8ClampedArrayCid: | 2174 case kTypedDataUint8ClampedArrayCid: |
| 2255 case kExternalTypedDataUint8ArrayCid: | 2175 case kExternalTypedDataUint8ArrayCid: |
| 2256 case kExternalTypedDataUint8ClampedArrayCid: | 2176 case kExternalTypedDataUint8ClampedArrayCid: |
| 2257 case kTypedDataInt16ArrayCid: | 2177 case kTypedDataInt16ArrayCid: |
| 2258 case kTypedDataUint16ArrayCid: | 2178 case kTypedDataUint16ArrayCid: |
| 2259 case kTypedDataInt32ArrayCid: | 2179 case kTypedDataInt32ArrayCid: |
| 2260 case kTypedDataUint32ArrayCid: | 2180 case kTypedDataUint32ArrayCid: |
| 2261 case kTypedDataInt64ArrayCid: | 2181 case kTypedDataInt64ArrayCid: |
| 2262 ASSERT(value_type.IsIntType()); | 2182 ASSERT(value_type.IsIntType()); |
| 2263 // Fall through. | 2183 // Fall through. |
| 2264 case kTypedDataFloat32ArrayCid: | 2184 case kTypedDataFloat32ArrayCid: |
| 2265 case kTypedDataFloat64ArrayCid: { | 2185 case kTypedDataFloat64ArrayCid: { |
| 2266 type_args = flow_graph->constant_null(); | 2186 type_args = flow_graph->constant_null(); |
| 2267 ASSERT((array_cid != kTypedDataFloat32ArrayCid && | 2187 ASSERT((array_cid != kTypedDataFloat32ArrayCid && |
| 2268 array_cid != kTypedDataFloat64ArrayCid) || | 2188 array_cid != kTypedDataFloat64ArrayCid) || |
| 2269 value_type.IsDoubleType()); | 2189 value_type.IsDoubleType()); |
| 2270 ASSERT(value_type.IsInstantiated()); | 2190 ASSERT(value_type.IsInstantiated()); |
| 2271 break; | 2191 break; |
| 2272 } | 2192 } |
| 2273 case kTypedDataFloat32x4ArrayCid: { | 2193 case kTypedDataFloat32x4ArrayCid: { |
| 2274 type_args = flow_graph->constant_null(); | 2194 type_args = flow_graph->constant_null(); |
| 2275 ASSERT((array_cid != kTypedDataFloat32x4ArrayCid) || | 2195 ASSERT((array_cid != kTypedDataFloat32x4ArrayCid) || |
| 2276 value_type.IsFloat32x4Type()); | 2196 value_type.IsFloat32x4Type()); |
| 2277 ASSERT(value_type.IsInstantiated()); | 2197 ASSERT(value_type.IsInstantiated()); |
| 2278 break; | 2198 break; |
| 2279 } | 2199 } |
| 2280 case kTypedDataFloat64x2ArrayCid: { | 2200 case kTypedDataFloat64x2ArrayCid: { |
| 2281 type_args = flow_graph->constant_null(); | 2201 type_args = flow_graph->constant_null(); |
| 2282 ASSERT((array_cid != kTypedDataFloat64x2ArrayCid) || | 2202 ASSERT((array_cid != kTypedDataFloat64x2ArrayCid) || |
| 2283 value_type.IsFloat64x2Type()); | 2203 value_type.IsFloat64x2Type()); |
| 2284 ASSERT(value_type.IsInstantiated()); | 2204 ASSERT(value_type.IsInstantiated()); |
| 2285 break; | 2205 break; |
| 2286 } | 2206 } |
| 2287 default: | 2207 default: |
| 2288 // TODO(fschneider): Add support for other array types. | 2208 // TODO(fschneider): Add support for other array types. |
| 2289 UNREACHABLE(); | 2209 UNREACHABLE(); |
| 2290 } | 2210 } |
| 2291 AssertAssignableInstr* assert_value = | 2211 AssertAssignableInstr* assert_value = new (Z) AssertAssignableInstr( |
| 2292 new(Z) AssertAssignableInstr(token_pos, | 2212 token_pos, new (Z) Value(stored_value), new (Z) Value(type_args), |
| 2293 new(Z) Value(stored_value), | 2213 value_type, Symbols::Value(), call->deopt_id()); |
| 2294 new(Z) Value(type_args), | 2214 cursor = flow_graph->AppendTo(cursor, assert_value, call->env(), |
| 2295 value_type, | |
| 2296 Symbols::Value(), | |
| 2297 call->deopt_id()); | |
| 2298 cursor = flow_graph->AppendTo(cursor, | |
| 2299 assert_value, | |
| 2300 call->env(), | |
| 2301 FlowGraph::kValue); | 2215 FlowGraph::kValue); |
| 2302 } | 2216 } |
| 2303 | 2217 |
| 2304 array_cid = PrepareInlineIndexedOp(flow_graph, | 2218 array_cid = PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, index, |
| 2305 call, | |
| 2306 array_cid, | |
| 2307 &array, | |
| 2308 index, | |
| 2309 &cursor); | 2219 &cursor); |
| 2310 | 2220 |
| 2311 // Check if store barrier is needed. Byte arrays don't need a store barrier. | 2221 // Check if store barrier is needed. Byte arrays don't need a store barrier. |
| 2312 StoreBarrierType needs_store_barrier = | 2222 StoreBarrierType needs_store_barrier = |
| 2313 (RawObject::IsTypedDataClassId(array_cid) || | 2223 (RawObject::IsTypedDataClassId(array_cid) || |
| 2314 RawObject::IsTypedDataViewClassId(array_cid) || | 2224 RawObject::IsTypedDataViewClassId(array_cid) || |
| 2315 RawObject::IsExternalTypedDataClassId(array_cid)) ? kNoStoreBarrier | 2225 RawObject::IsExternalTypedDataClassId(array_cid)) |
| 2316 : kEmitStoreBarrier; | 2226 ? kNoStoreBarrier |
| 2227 : kEmitStoreBarrier; |
| 2317 | 2228 |
| 2318 // No need to class check stores to Int32 and Uint32 arrays because | 2229 // No need to class check stores to Int32 and Uint32 arrays because |
| 2319 // we insert unboxing instructions below which include a class check. | 2230 // we insert unboxing instructions below which include a class check. |
| 2320 if ((array_cid != kTypedDataUint32ArrayCid) && | 2231 if ((array_cid != kTypedDataUint32ArrayCid) && |
| 2321 (array_cid != kTypedDataInt32ArrayCid) && | 2232 (array_cid != kTypedDataInt32ArrayCid) && !value_check.IsNull()) { |
| 2322 !value_check.IsNull()) { | |
| 2323 // No store barrier needed because checked value is a smi, an unboxed mint, | 2233 // No store barrier needed because checked value is a smi, an unboxed mint, |
| 2324 // an unboxed double, an unboxed Float32x4, or unboxed Int32x4. | 2234 // an unboxed double, an unboxed Float32x4, or unboxed Int32x4. |
| 2325 needs_store_barrier = kNoStoreBarrier; | 2235 needs_store_barrier = kNoStoreBarrier; |
| 2326 Instruction* check = GetCheckClass(flow_graph, | 2236 Instruction* check = GetCheckClass(flow_graph, stored_value, value_check, |
| 2327 stored_value, | 2237 call->deopt_id(), call->token_pos()); |
| 2328 value_check, | 2238 cursor = |
| 2329 call->deopt_id(), | 2239 flow_graph->AppendTo(cursor, check, call->env(), FlowGraph::kEffect); |
| 2330 call->token_pos()); | |
| 2331 cursor = flow_graph->AppendTo(cursor, | |
| 2332 check, | |
| 2333 call->env(), | |
| 2334 FlowGraph::kEffect); | |
| 2335 } | 2240 } |
| 2336 | 2241 |
| 2337 if (array_cid == kTypedDataFloat32ArrayCid) { | 2242 if (array_cid == kTypedDataFloat32ArrayCid) { |
| 2243 stored_value = new (Z) |
| 2244 DoubleToFloatInstr(new (Z) Value(stored_value), call->deopt_id()); |
| 2245 cursor = |
| 2246 flow_graph->AppendTo(cursor, stored_value, NULL, FlowGraph::kValue); |
| 2247 } else if (array_cid == kTypedDataInt32ArrayCid) { |
| 2338 stored_value = | 2248 stored_value = |
| 2339 new(Z) DoubleToFloatInstr( | 2249 new (Z) UnboxInt32Instr(UnboxInt32Instr::kTruncate, |
| 2340 new(Z) Value(stored_value), call->deopt_id()); | 2250 new (Z) Value(stored_value), call->deopt_id()); |
| 2341 cursor = flow_graph->AppendTo(cursor, | 2251 cursor = flow_graph->AppendTo(cursor, stored_value, call->env(), |
| 2342 stored_value, | |
| 2343 NULL, | |
| 2344 FlowGraph::kValue); | |
| 2345 } else if (array_cid == kTypedDataInt32ArrayCid) { | |
| 2346 stored_value = new(Z) UnboxInt32Instr( | |
| 2347 UnboxInt32Instr::kTruncate, | |
| 2348 new(Z) Value(stored_value), | |
| 2349 call->deopt_id()); | |
| 2350 cursor = flow_graph->AppendTo(cursor, | |
| 2351 stored_value, | |
| 2352 call->env(), | |
| 2353 FlowGraph::kValue); | 2252 FlowGraph::kValue); |
| 2354 } else if (array_cid == kTypedDataUint32ArrayCid) { | 2253 } else if (array_cid == kTypedDataUint32ArrayCid) { |
| 2355 stored_value = new(Z) UnboxUint32Instr( | 2254 stored_value = |
| 2356 new(Z) Value(stored_value), | 2255 new (Z) UnboxUint32Instr(new (Z) Value(stored_value), call->deopt_id()); |
| 2357 call->deopt_id()); | |
| 2358 ASSERT(stored_value->AsUnboxInteger()->is_truncating()); | 2256 ASSERT(stored_value->AsUnboxInteger()->is_truncating()); |
| 2359 cursor = flow_graph->AppendTo(cursor, | 2257 cursor = flow_graph->AppendTo(cursor, stored_value, call->env(), |
| 2360 stored_value, | |
| 2361 call->env(), | |
| 2362 FlowGraph::kValue); | 2258 FlowGraph::kValue); |
| 2363 } | 2259 } |
| 2364 | 2260 |
| 2365 const intptr_t index_scale = Instance::ElementSizeFor(array_cid); | 2261 const intptr_t index_scale = Instance::ElementSizeFor(array_cid); |
| 2366 *last = new(Z) StoreIndexedInstr(new(Z) Value(array), | 2262 *last = new (Z) StoreIndexedInstr( |
| 2367 new(Z) Value(index), | 2263 new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value), |
| 2368 new(Z) Value(stored_value), | 2264 needs_store_barrier, index_scale, array_cid, kAlignedAccess, |
| 2369 needs_store_barrier, | 2265 call->deopt_id(), call->token_pos()); |
| 2370 index_scale, | 2266 flow_graph->AppendTo(cursor, *last, call->env(), FlowGraph::kEffect); |
| 2371 array_cid, | |
| 2372 kAlignedAccess, | |
| 2373 call->deopt_id(), | |
| 2374 call->token_pos()); | |
| 2375 flow_graph->AppendTo(cursor, | |
| 2376 *last, | |
| 2377 call->env(), | |
| 2378 FlowGraph::kEffect); | |
| 2379 return true; | 2267 return true; |
| 2380 } | 2268 } |
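A simplified standalone sketch of the store-barrier decision in InlineSetIndexed above (the predicate names are assumptions): raw numeric stores into typed-data backing stores never need a GC write barrier, nor do stores whose value was just class-checked to be a smi or unboxed number; only a heap-object store into a regular array keeps the barrier.

    enum StoreBarrierType { kNoStoreBarrier, kEmitStoreBarrier };

    StoreBarrierType NeedsStoreBarrier(bool stores_into_typed_data,
                                       bool value_checked_smi_or_unboxed) {
      if (stores_into_typed_data || value_checked_smi_or_unboxed) {
        return kNoStoreBarrier;
      }
      return kEmitStoreBarrier;
    }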
| 2381 | 2269 |
| 2382 | 2270 |
| 2383 static bool InlineDoubleOp(FlowGraph* flow_graph, | 2271 static bool InlineDoubleOp(FlowGraph* flow_graph, |
| 2384 Token::Kind op_kind, | 2272 Token::Kind op_kind, |
| 2385 Instruction* call, | 2273 Instruction* call, |
| 2386 TargetEntryInstr** entry, | 2274 TargetEntryInstr** entry, |
| 2387 Definition** last) { | 2275 Definition** last) { |
| 2388 if (!CanUnboxDouble()) { | 2276 if (!CanUnboxDouble()) { |
| 2389 return false; | 2277 return false; |
| 2390 } | 2278 } |
| 2391 Definition* left = call->ArgumentAt(0); | 2279 Definition* left = call->ArgumentAt(0); |
| 2392 Definition* right = call->ArgumentAt(1); | 2280 Definition* right = call->ArgumentAt(1); |
| 2393 | 2281 |
| 2394 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2282 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2395 call->GetBlock()->try_index()); | 2283 call->GetBlock()->try_index()); |
| 2396 (*entry)->InheritDeoptTarget(Z, call); | 2284 (*entry)->InheritDeoptTarget(Z, call); |
| 2397 // Arguments are checked. No need for class check. | 2285 // Arguments are checked. No need for class check. |
| 2398 BinaryDoubleOpInstr* double_bin_op = | 2286 BinaryDoubleOpInstr* double_bin_op = new (Z) |
| 2399 new(Z) BinaryDoubleOpInstr(op_kind, | 2287 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right), |
| 2400 new(Z) Value(left), | 2288 call->deopt_id(), call->token_pos()); |
| 2401 new(Z) Value(right), | |
| 2402 call->deopt_id(), call->token_pos()); | |
| 2403 flow_graph->AppendTo(*entry, double_bin_op, call->env(), FlowGraph::kValue); | 2289 flow_graph->AppendTo(*entry, double_bin_op, call->env(), FlowGraph::kValue); |
| 2404 *last = double_bin_op; | 2290 *last = double_bin_op; |
| 2405 | 2291 |
| 2406 return true; | 2292 return true; |
| 2407 } | 2293 } |
| 2408 | 2294 |
| 2409 | 2295 |
| 2410 static bool InlineDoubleTestOp(FlowGraph* flow_graph, | 2296 static bool InlineDoubleTestOp(FlowGraph* flow_graph, |
| 2411 Instruction* call, | 2297 Instruction* call, |
| 2412 MethodRecognizer::Kind kind, | 2298 MethodRecognizer::Kind kind, |
| 2413 TargetEntryInstr** entry, | 2299 TargetEntryInstr** entry, |
| 2414 Definition** last) { | 2300 Definition** last) { |
| 2415 if (!CanUnboxDouble()) { | 2301 if (!CanUnboxDouble()) { |
| 2416 return false; | 2302 return false; |
| 2417 } | 2303 } |
| 2418 Definition* d = call->ArgumentAt(0); | 2304 Definition* d = call->ArgumentAt(0); |
| 2419 | 2305 |
| 2420 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2306 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2421 call->GetBlock()->try_index()); | 2307 call->GetBlock()->try_index()); |
| 2422 (*entry)->InheritDeoptTarget(Z, call); | 2308 (*entry)->InheritDeoptTarget(Z, call); |
| 2423 // Arguments are checked. No need for class check. | 2309 // Arguments are checked. No need for class check. |
| 2424 | 2310 |
| 2425 DoubleTestOpInstr* double_test_op = | 2311 DoubleTestOpInstr* double_test_op = new (Z) DoubleTestOpInstr( |
| 2426 new(Z) DoubleTestOpInstr(kind, | 2312 kind, new (Z) Value(d), call->deopt_id(), call->token_pos()); |
| 2427 new(Z) Value(d), | 2313 flow_graph->AppendTo(*entry, double_test_op, call->env(), FlowGraph::kValue); |
| 2428 call->deopt_id(), | |
| 2429 call->token_pos()); | |
| 2430 flow_graph->AppendTo( | |
| 2431 *entry, double_test_op, call->env(), FlowGraph::kValue); | |
| 2432 *last = double_test_op; | 2314 *last = double_test_op; |
| 2433 | 2315 |
| 2434 return true; | 2316 return true; |
| 2435 } | 2317 } |
| 2436 | 2318 |
| 2437 | 2319 |
| 2438 static bool InlineSmiBitAndFromSmi(FlowGraph* flow_graph, | 2320 static bool InlineSmiBitAndFromSmi(FlowGraph* flow_graph, |
| 2439 Instruction* call, | 2321 Instruction* call, |
| 2440 TargetEntryInstr** entry, | 2322 TargetEntryInstr** entry, |
| 2441 Definition** last) { | 2323 Definition** last) { |
| 2442 Definition* left = call->ArgumentAt(0); | 2324 Definition* left = call->ArgumentAt(0); |
| 2443 Definition* right = call->ArgumentAt(1); | 2325 Definition* right = call->ArgumentAt(1); |
| 2444 | 2326 |
| 2445 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2327 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2446 call->GetBlock()->try_index()); | 2328 call->GetBlock()->try_index()); |
| 2447 (*entry)->InheritDeoptTarget(Z, call); | 2329 (*entry)->InheritDeoptTarget(Z, call); |
| 2448 // Right argument is known to be smi: other._bitAndFromSmi(this); | 2330 // Right argument is known to be smi: other._bitAndFromSmi(this); |
| 2449 BinarySmiOpInstr* smi_op = | 2331 BinarySmiOpInstr* smi_op = |
| 2450 new(Z) BinarySmiOpInstr(Token::kBIT_AND, | 2332 new (Z) BinarySmiOpInstr(Token::kBIT_AND, new (Z) Value(left), |
| 2451 new(Z) Value(left), | 2333 new (Z) Value(right), call->deopt_id()); |
| 2452 new(Z) Value(right), | |
| 2453 call->deopt_id()); | |
| 2454 flow_graph->AppendTo(*entry, smi_op, call->env(), FlowGraph::kValue); | 2334 flow_graph->AppendTo(*entry, smi_op, call->env(), FlowGraph::kValue); |
| 2455 *last = smi_op; | 2335 *last = smi_op; |
| 2456 | 2336 |
| 2457 return true; | 2337 return true; |
| 2458 } | 2338 } |
| 2459 | 2339 |
| 2460 | 2340 |
| 2461 static bool InlineGrowableArraySetter(FlowGraph* flow_graph, | 2341 static bool InlineGrowableArraySetter(FlowGraph* flow_graph, |
| 2462 intptr_t offset, | 2342 intptr_t offset, |
| 2463 StoreBarrierType store_barrier_type, | 2343 StoreBarrierType store_barrier_type, |
| 2464 Instruction* call, | 2344 Instruction* call, |
| 2465 TargetEntryInstr** entry, | 2345 TargetEntryInstr** entry, |
| 2466 Definition** last) { | 2346 Definition** last) { |
| 2467 Definition* array = call->ArgumentAt(0); | 2347 Definition* array = call->ArgumentAt(0); |
| 2468 Definition* value = call->ArgumentAt(1); | 2348 Definition* value = call->ArgumentAt(1); |
| 2469 | 2349 |
| 2470 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2350 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2471 call->GetBlock()->try_index()); | 2351 call->GetBlock()->try_index()); |
| 2472 (*entry)->InheritDeoptTarget(Z, call); | 2352 (*entry)->InheritDeoptTarget(Z, call); |
| 2473 | 2353 |
| 2474 // This is an internal method, no need to check argument types. | 2354 // This is an internal method, no need to check argument types. |
| 2475 StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr( | 2355 StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr( |
| 2476 offset, | 2356 offset, new (Z) Value(array), new (Z) Value(value), store_barrier_type, |
| 2477 new(Z) Value(array), | |
| 2478 new(Z) Value(value), | |
| 2479 store_barrier_type, | |
| 2480 call->token_pos()); | 2357 call->token_pos()); |
| 2481 flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect); | 2358 flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect); |
| 2482 *last = store; | 2359 *last = store; |
| 2483 | 2360 |
| 2484 return true; | 2361 return true; |
| 2485 } | 2362 } |
| 2486 | 2363 |
| 2487 | 2364 |
| 2488 static void PrepareInlineByteArrayBaseOp( | 2365 static void PrepareInlineByteArrayBaseOp(FlowGraph* flow_graph, |
| 2489 FlowGraph* flow_graph, | 2366 Instruction* call, |
| 2490 Instruction* call, | 2367 intptr_t array_cid, |
| 2491 intptr_t array_cid, | 2368 intptr_t view_cid, |
| 2492 intptr_t view_cid, | 2369 Definition** array, |
| 2493 Definition** array, | 2370 Definition* byte_index, |
| 2494 Definition* byte_index, | 2371 Instruction** cursor) { |
| 2495 Instruction** cursor) { | 2372 LoadFieldInstr* length = new (Z) LoadFieldInstr( |
| 2496 LoadFieldInstr* length = | 2373 new (Z) Value(*array), CheckArrayBoundInstr::LengthOffsetFor(array_cid), |
| 2497 new(Z) LoadFieldInstr( | 2374 Type::ZoneHandle(Z, Type::SmiType()), call->token_pos()); |
| 2498 new(Z) Value(*array), | |
| 2499 CheckArrayBoundInstr::LengthOffsetFor(array_cid), | |
| 2500 Type::ZoneHandle(Z, Type::SmiType()), | |
| 2501 call->token_pos()); | |
| 2502 length->set_is_immutable(true); | 2375 length->set_is_immutable(true); |
| 2503 length->set_result_cid(kSmiCid); | 2376 length->set_result_cid(kSmiCid); |
| 2504 length->set_recognized_kind( | 2377 length->set_recognized_kind( |
| 2505 LoadFieldInstr::RecognizedKindFromArrayCid(array_cid)); | 2378 LoadFieldInstr::RecognizedKindFromArrayCid(array_cid)); |
| 2506 *cursor = flow_graph->AppendTo(*cursor, | 2379 *cursor = flow_graph->AppendTo(*cursor, length, NULL, FlowGraph::kValue); |
| 2507 length, | |
| 2508 NULL, | |
| 2509 FlowGraph::kValue); | |
| 2510 | 2380 |
| 2511 intptr_t element_size = Instance::ElementSizeFor(array_cid); | 2381 intptr_t element_size = Instance::ElementSizeFor(array_cid); |
| 2512 ConstantInstr* bytes_per_element = | 2382 ConstantInstr* bytes_per_element = |
| 2513 flow_graph->GetConstant(Smi::Handle(Z, Smi::New(element_size))); | 2383 flow_graph->GetConstant(Smi::Handle(Z, Smi::New(element_size))); |
| 2514 BinarySmiOpInstr* len_in_bytes = | 2384 BinarySmiOpInstr* len_in_bytes = new (Z) |
| 2515 new(Z) BinarySmiOpInstr(Token::kMUL, | 2385 BinarySmiOpInstr(Token::kMUL, new (Z) Value(length), |
| 2516 new(Z) Value(length), | 2386 new (Z) Value(bytes_per_element), call->deopt_id()); |
| 2517 new(Z) Value(bytes_per_element), | |
| 2518 call->deopt_id()); | |
| 2519 *cursor = flow_graph->AppendTo(*cursor, len_in_bytes, call->env(), | 2387 *cursor = flow_graph->AppendTo(*cursor, len_in_bytes, call->env(), |
| 2520 FlowGraph::kValue); | 2388 FlowGraph::kValue); |
| 2521 | 2389 |
| 2522 // adjusted_length = len_in_bytes - (element_size - 1). | 2390 // adjusted_length = len_in_bytes - (element_size - 1). |
| 2523 Definition* adjusted_length = len_in_bytes; | 2391 Definition* adjusted_length = len_in_bytes; |
| 2524 intptr_t adjustment = Instance::ElementSizeFor(view_cid) - 1; | 2392 intptr_t adjustment = Instance::ElementSizeFor(view_cid) - 1; |
| 2525 if (adjustment > 0) { | 2393 if (adjustment > 0) { |
| 2526 ConstantInstr* length_adjustment = | 2394 ConstantInstr* length_adjustment = |
| 2527 flow_graph->GetConstant(Smi::Handle(Z, Smi::New(adjustment))); | 2395 flow_graph->GetConstant(Smi::Handle(Z, Smi::New(adjustment))); |
| 2528 adjusted_length = | 2396 adjusted_length = new (Z) |
| 2529 new(Z) BinarySmiOpInstr(Token::kSUB, | 2397 BinarySmiOpInstr(Token::kSUB, new (Z) Value(len_in_bytes), |
| 2530 new(Z) Value(len_in_bytes), | 2398 new (Z) Value(length_adjustment), call->deopt_id()); |
| 2531 new(Z) Value(length_adjustment), | |
| 2532 call->deopt_id()); | |
| 2533 *cursor = flow_graph->AppendTo(*cursor, adjusted_length, call->env(), | 2399 *cursor = flow_graph->AppendTo(*cursor, adjusted_length, call->env(), |
| 2534 FlowGraph::kValue); | 2400 FlowGraph::kValue); |
| 2535 } | 2401 } |
| 2536 | 2402 |
| 2537 // Check adjusted_length > 0. | 2403 // Check adjusted_length > 0. |
| 2538 ConstantInstr* zero = | 2404 ConstantInstr* zero = flow_graph->GetConstant(Smi::Handle(Z, Smi::New(0))); |
| 2539 flow_graph->GetConstant(Smi::Handle(Z, Smi::New(0))); | 2405 *cursor = flow_graph->AppendTo( |
| 2540 *cursor = flow_graph->AppendTo(*cursor, | 2406 *cursor, |
| 2541 new(Z) CheckArrayBoundInstr( | 2407 new (Z) CheckArrayBoundInstr(new (Z) Value(adjusted_length), |
| 2542 new(Z) Value(adjusted_length), | 2408 new (Z) Value(zero), call->deopt_id()), |
| 2543 new(Z) Value(zero), | 2409 call->env(), FlowGraph::kEffect); |
| 2544 call->deopt_id()), | |
| 2545 call->env(), | |
| 2546 FlowGraph::kEffect); | |
| 2547 // Check 0 <= byte_index < adjusted_length. | 2410 // Check 0 <= byte_index < adjusted_length. |
| 2548 *cursor = flow_graph->AppendTo(*cursor, | 2411 *cursor = flow_graph->AppendTo( |
| 2549 new(Z) CheckArrayBoundInstr( | 2412 *cursor, |
| 2550 new(Z) Value(adjusted_length), | 2413 new (Z) CheckArrayBoundInstr(new (Z) Value(adjusted_length), |
| 2551 new(Z) Value(byte_index), | 2414 new (Z) Value(byte_index), call->deopt_id()), |
| 2552 call->deopt_id()), | 2415 call->env(), FlowGraph::kEffect); |
| 2553 call->env(), | |
| 2554 FlowGraph::kEffect); | |
| 2555 | 2416 |
| 2556 if (RawObject::IsExternalTypedDataClassId(array_cid)) { | 2417 if (RawObject::IsExternalTypedDataClassId(array_cid)) { |
| 2557 LoadUntaggedInstr* elements = | 2418 LoadUntaggedInstr* elements = new (Z) LoadUntaggedInstr( |
| 2558 new(Z) LoadUntaggedInstr(new(Z) Value(*array), | 2419 new (Z) Value(*array), ExternalTypedData::data_offset()); |
| 2559 ExternalTypedData::data_offset()); | 2420 *cursor = flow_graph->AppendTo(*cursor, elements, NULL, FlowGraph::kValue); |
| 2560 *cursor = flow_graph->AppendTo(*cursor, | |
| 2561 elements, | |
| 2562 NULL, | |
| 2563 FlowGraph::kValue); | |
| 2564 *array = elements; | 2421 *array = elements; |
| 2565 } | 2422 } |
| 2566 } | 2423 } |
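A worked form of the bound computed in PrepareInlineByteArrayBaseOp above (plain C++, assumed helper name): reading view_size bytes at byte_index from a backing store of length_in_elements * element_size bytes is safe iff 0 <= byte_index < len_in_bytes - (view_size - 1), which is exactly the adjusted_length the generated checks compare against.

    #include <cstdint>

    bool ByteIndexInBounds(int64_t length_in_elements,
                           int64_t element_size,
                           int64_t view_size,
                           int64_t byte_index) {
      const int64_t len_in_bytes = length_in_elements * element_size;
      const int64_t adjusted_length = len_in_bytes - (view_size - 1);
      return (adjusted_length > 0) && (byte_index >= 0) &&
             (byte_index < adjusted_length);
    }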
| 2567 | 2424 |
| 2568 | 2425 |
| 2569 static bool InlineByteArrayBaseLoad(FlowGraph* flow_graph, | 2426 static bool InlineByteArrayBaseLoad(FlowGraph* flow_graph, |
| 2570 Instruction* call, | 2427 Instruction* call, |
| 2571 Definition* receiver, | 2428 Definition* receiver, |
| 2572 intptr_t array_cid, | 2429 intptr_t array_cid, |
| 2573 intptr_t view_cid, | 2430 intptr_t view_cid, |
| 2574 TargetEntryInstr** entry, | 2431 TargetEntryInstr** entry, |
| 2575 Definition** last) { | 2432 Definition** last) { |
| 2576 ASSERT(array_cid != kIllegalCid); | 2433 ASSERT(array_cid != kIllegalCid); |
| 2577 Definition* array = receiver; | 2434 Definition* array = receiver; |
| 2578 Definition* index = call->ArgumentAt(1); | 2435 Definition* index = call->ArgumentAt(1); |
| 2579 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2436 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2580 call->GetBlock()->try_index()); | 2437 call->GetBlock()->try_index()); |
| 2581 (*entry)->InheritDeoptTarget(Z, call); | 2438 (*entry)->InheritDeoptTarget(Z, call); |
| 2582 Instruction* cursor = *entry; | 2439 Instruction* cursor = *entry; |
| 2583 | 2440 |
| 2584 PrepareInlineByteArrayBaseOp(flow_graph, | 2441 PrepareInlineByteArrayBaseOp(flow_graph, call, array_cid, view_cid, &array, |
| 2585 call, | 2442 index, &cursor); |
| 2586 array_cid, | |
| 2587 view_cid, | |
| 2588 &array, | |
| 2589 index, | |
| 2590 &cursor); | |
| 2591 | 2443 |
| 2592 intptr_t deopt_id = Thread::kNoDeoptId; | 2444 intptr_t deopt_id = Thread::kNoDeoptId; |
| 2593 if ((array_cid == kTypedDataInt32ArrayCid) || | 2445 if ((array_cid == kTypedDataInt32ArrayCid) || |
| 2594 (array_cid == kTypedDataUint32ArrayCid)) { | 2446 (array_cid == kTypedDataUint32ArrayCid)) { |
| 2595 // Deoptimization may be needed if result does not always fit in a Smi. | 2447 // Deoptimization may be needed if result does not always fit in a Smi. |
| 2596 deopt_id = (kSmiBits >= 32) ? Thread::kNoDeoptId : call->deopt_id(); | 2448 deopt_id = (kSmiBits >= 32) ? Thread::kNoDeoptId : call->deopt_id(); |
| 2597 } | 2449 } |
| 2598 | 2450 |
| 2599 *last = new(Z) LoadIndexedInstr(new(Z) Value(array), | 2451 *last = new (Z) |
| 2600 new(Z) Value(index), | 2452 LoadIndexedInstr(new (Z) Value(array), new (Z) Value(index), 1, view_cid, |
| 2601 1, | 2453 kUnalignedAccess, deopt_id, call->token_pos()); |
| 2602 view_cid, | |
| 2603 kUnalignedAccess, | |
| 2604 deopt_id, | |
| 2605 call->token_pos()); | |
| 2606 cursor = flow_graph->AppendTo( | 2454 cursor = flow_graph->AppendTo( |
| 2607 cursor, | 2455 cursor, *last, deopt_id != Thread::kNoDeoptId ? call->env() : NULL, |
| 2608 *last, | |
| 2609 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, | |
| 2610 FlowGraph::kValue); | 2456 FlowGraph::kValue); |
| 2611 | 2457 |
| 2612 if (view_cid == kTypedDataFloat32ArrayCid) { | 2458 if (view_cid == kTypedDataFloat32ArrayCid) { |
| 2613 *last = new(Z) FloatToDoubleInstr(new(Z) Value(*last), deopt_id); | 2459 *last = new (Z) FloatToDoubleInstr(new (Z) Value(*last), deopt_id); |
| 2614 flow_graph->AppendTo(cursor, | 2460 flow_graph->AppendTo(cursor, *last, |
| 2615 *last, | |
| 2616 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, | 2461 deopt_id != Thread::kNoDeoptId ? call->env() : NULL, |
| 2617 FlowGraph::kValue); | 2462 FlowGraph::kValue); |
| 2618 } | 2463 } |
| 2619 return true; | 2464 return true; |
| 2620 } | 2465 } |
| 2621 | 2466 |
| 2622 | 2467 |
| 2623 static bool InlineByteArrayBaseStore(FlowGraph* flow_graph, | 2468 static bool InlineByteArrayBaseStore(FlowGraph* flow_graph, |
| 2624 const Function& target, | 2469 const Function& target, |
| 2625 Instruction* call, | 2470 Instruction* call, |
| 2626 Definition* receiver, | 2471 Definition* receiver, |
| 2627 intptr_t array_cid, | 2472 intptr_t array_cid, |
| 2628 intptr_t view_cid, | 2473 intptr_t view_cid, |
| 2629 TargetEntryInstr** entry, | 2474 TargetEntryInstr** entry, |
| 2630 Definition** last) { | 2475 Definition** last) { |
| 2631 ASSERT(array_cid != kIllegalCid); | 2476 ASSERT(array_cid != kIllegalCid); |
| 2632 Definition* array = receiver; | 2477 Definition* array = receiver; |
| 2633 Definition* index = call->ArgumentAt(1); | 2478 Definition* index = call->ArgumentAt(1); |
| 2634 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2479 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2635 call->GetBlock()->try_index()); | 2480 call->GetBlock()->try_index()); |
| 2636 (*entry)->InheritDeoptTarget(Z, call); | 2481 (*entry)->InheritDeoptTarget(Z, call); |
| 2637 Instruction* cursor = *entry; | 2482 Instruction* cursor = *entry; |
| 2638 | 2483 |
| 2639 PrepareInlineByteArrayBaseOp(flow_graph, | 2484 PrepareInlineByteArrayBaseOp(flow_graph, call, array_cid, view_cid, &array, |
| 2640 call, | 2485 index, &cursor); |
| 2641 array_cid, | |
| 2642 view_cid, | |
| 2643 &array, | |
| 2644 index, | |
| 2645 &cursor); | |
| 2646 | 2486 |
| 2647 // Extract the instance call so we can use the function_name in the stored | 2487 // Extract the instance call so we can use the function_name in the stored |
| 2648 // value check ICData. | 2488 // value check ICData. |
| 2649 InstanceCallInstr* i_call = NULL; | 2489 InstanceCallInstr* i_call = NULL; |
| 2650 if (call->IsPolymorphicInstanceCall()) { | 2490 if (call->IsPolymorphicInstanceCall()) { |
| 2651 i_call = call->AsPolymorphicInstanceCall()->instance_call(); | 2491 i_call = call->AsPolymorphicInstanceCall()->instance_call(); |
| 2652 } else { | 2492 } else { |
| 2653 ASSERT(call->IsInstanceCall()); | 2493 ASSERT(call->IsInstanceCall()); |
| 2654 i_call = call->AsInstanceCall(); | 2494 i_call = call->AsInstanceCall(); |
| 2655 } | 2495 } |
| 2656 ASSERT(i_call != NULL); | 2496 ASSERT(i_call != NULL); |
| 2657 ICData& value_check = ICData::ZoneHandle(Z); | 2497 ICData& value_check = ICData::ZoneHandle(Z); |
| 2658 switch (view_cid) { | 2498 switch (view_cid) { |
| 2659 case kTypedDataInt8ArrayCid: | 2499 case kTypedDataInt8ArrayCid: |
| 2660 case kTypedDataUint8ArrayCid: | 2500 case kTypedDataUint8ArrayCid: |
| 2661 case kTypedDataUint8ClampedArrayCid: | 2501 case kTypedDataUint8ClampedArrayCid: |
| 2662 case kExternalTypedDataUint8ArrayCid: | 2502 case kExternalTypedDataUint8ArrayCid: |
| 2663 case kExternalTypedDataUint8ClampedArrayCid: | 2503 case kExternalTypedDataUint8ClampedArrayCid: |
| 2664 case kTypedDataInt16ArrayCid: | 2504 case kTypedDataInt16ArrayCid: |
| 2665 case kTypedDataUint16ArrayCid: { | 2505 case kTypedDataUint16ArrayCid: { |
| 2666 // Check that value is always smi. | 2506 // Check that value is always smi. |
| 2667 value_check = ICData::New(flow_graph->function(), | 2507 value_check = ICData::New(flow_graph->function(), i_call->function_name(), |
| 2668 i_call->function_name(), | |
| 2669 Object::empty_array(), // Dummy args. descr. | 2508 Object::empty_array(), // Dummy args. descr. |
| 2670 Thread::kNoDeoptId, | 2509 Thread::kNoDeoptId, 1, false); |
| 2671 1, | |
| 2672 false); | |
| 2673 value_check.AddReceiverCheck(kSmiCid, target); | 2510 value_check.AddReceiverCheck(kSmiCid, target); |
| 2674 break; | 2511 break; |
| 2675 } | 2512 } |
| 2676 case kTypedDataInt32ArrayCid: | 2513 case kTypedDataInt32ArrayCid: |
| 2677 case kTypedDataUint32ArrayCid: | 2514 case kTypedDataUint32ArrayCid: |
| 2678 // On 64-bit platforms assume that stored value is always a smi. | 2515 // On 64-bit platforms assume that stored value is always a smi. |
| 2679 if (kSmiBits >= 32) { | 2516 if (kSmiBits >= 32) { |
| 2680 value_check = ICData::New(flow_graph->function(), | 2517 value_check = |
| 2681 i_call->function_name(), | 2518 ICData::New(flow_graph->function(), i_call->function_name(), |
| 2682 Object::empty_array(), // Dummy args. descr. | 2519 Object::empty_array(), // Dummy args. descr. |
| 2683 Thread::kNoDeoptId, | 2520 Thread::kNoDeoptId, 1, false); |
| 2684 1, | |
| 2685 false); | |
| 2686 value_check.AddReceiverCheck(kSmiCid, target); | 2521 value_check.AddReceiverCheck(kSmiCid, target); |
| 2687 } | 2522 } |
| 2688 break; | 2523 break; |
| 2689 case kTypedDataFloat32ArrayCid: | 2524 case kTypedDataFloat32ArrayCid: |
| 2690 case kTypedDataFloat64ArrayCid: { | 2525 case kTypedDataFloat64ArrayCid: { |
| 2691 // Check that value is always double. | 2526 // Check that value is always double. |
| 2692 value_check = ICData::New(flow_graph->function(), | 2527 value_check = ICData::New(flow_graph->function(), i_call->function_name(), |
| 2693 i_call->function_name(), | |
| 2694 Object::empty_array(), // Dummy args. descr. | 2528 Object::empty_array(), // Dummy args. descr. |
| 2695 Thread::kNoDeoptId, | 2529 Thread::kNoDeoptId, 1, false); |
| 2696 1, | |
| 2697 false); | |
| 2698 value_check.AddReceiverCheck(kDoubleCid, target); | 2530 value_check.AddReceiverCheck(kDoubleCid, target); |
| 2699 break; | 2531 break; |
| 2700 } | 2532 } |
| 2701 case kTypedDataInt32x4ArrayCid: { | 2533 case kTypedDataInt32x4ArrayCid: { |
| 2702 // Check that value is always Int32x4. | 2534 // Check that value is always Int32x4. |
| 2703 value_check = ICData::New(flow_graph->function(), | 2535 value_check = ICData::New(flow_graph->function(), i_call->function_name(), |
| 2704 i_call->function_name(), | |
| 2705 Object::empty_array(), // Dummy args. descr. | 2536 Object::empty_array(), // Dummy args. descr. |
| 2706 Thread::kNoDeoptId, | 2537 Thread::kNoDeoptId, 1, false); |
| 2707 1, | |
| 2708 false); | |
| 2709 value_check.AddReceiverCheck(kInt32x4Cid, target); | 2538 value_check.AddReceiverCheck(kInt32x4Cid, target); |
| 2710 break; | 2539 break; |
| 2711 } | 2540 } |
| 2712 case kTypedDataFloat32x4ArrayCid: { | 2541 case kTypedDataFloat32x4ArrayCid: { |
| 2713 // Check that value is always Float32x4. | 2542 // Check that value is always Float32x4. |
| 2714 value_check = ICData::New(flow_graph->function(), | 2543 value_check = ICData::New(flow_graph->function(), i_call->function_name(), |
| 2715 i_call->function_name(), | |
| 2716 Object::empty_array(), // Dummy args. descr. | 2544 Object::empty_array(), // Dummy args. descr. |
| 2717 Thread::kNoDeoptId, | 2545 Thread::kNoDeoptId, 1, false); |
| 2718 1, | |
| 2719 false); | |
| 2720 value_check.AddReceiverCheck(kFloat32x4Cid, target); | 2546 value_check.AddReceiverCheck(kFloat32x4Cid, target); |
| 2721 break; | 2547 break; |
| 2722 } | 2548 } |
| 2723 default: | 2549 default: |
| 2724 // Array cids are already checked in the caller. | 2550 // Array cids are already checked in the caller. |
| 2725 UNREACHABLE(); | 2551 UNREACHABLE(); |
| 2726 } | 2552 } |
| 2727 | 2553 |
| 2728 Definition* stored_value = call->ArgumentAt(2); | 2554 Definition* stored_value = call->ArgumentAt(2); |
| 2729 if (!value_check.IsNull()) { | 2555 if (!value_check.IsNull()) { |
| 2730 Instruction* check = GetCheckClass(flow_graph, | 2556 Instruction* check = GetCheckClass(flow_graph, stored_value, value_check, |
| 2731 stored_value, | 2557 call->deopt_id(), call->token_pos()); |
| 2732 value_check, | 2558 cursor = |
| 2733 call->deopt_id(), | 2559 flow_graph->AppendTo(cursor, check, call->env(), FlowGraph::kEffect); |
| 2734 call->token_pos()); | |
| 2735 cursor = flow_graph->AppendTo(cursor, | |
| 2736 check, | |
| 2737 call->env(), | |
| 2738 FlowGraph::kEffect); | |
| 2739 } | 2560 } |
| 2740 | 2561 |
| 2741 if (view_cid == kTypedDataFloat32ArrayCid) { | 2562 if (view_cid == kTypedDataFloat32ArrayCid) { |
| 2742 stored_value = new(Z) DoubleToFloatInstr( | 2563 stored_value = new (Z) |
| 2743 new(Z) Value(stored_value), call->deopt_id()); | 2564 DoubleToFloatInstr(new (Z) Value(stored_value), call->deopt_id()); |
| 2744 cursor = flow_graph->AppendTo(cursor, | 2565 cursor = |
| 2745 stored_value, | 2566 flow_graph->AppendTo(cursor, stored_value, NULL, FlowGraph::kValue); |
| 2746 NULL, | |
| 2747 FlowGraph::kValue); | |
| 2748 } else if (view_cid == kTypedDataInt32ArrayCid) { | 2567 } else if (view_cid == kTypedDataInt32ArrayCid) { |
| 2749 stored_value = new(Z) UnboxInt32Instr( | 2568 stored_value = |
| 2750 UnboxInt32Instr::kTruncate, | 2569 new (Z) UnboxInt32Instr(UnboxInt32Instr::kTruncate, |
| 2751 new(Z) Value(stored_value), | 2570 new (Z) Value(stored_value), call->deopt_id()); |
| 2752 call->deopt_id()); | 2571 cursor = flow_graph->AppendTo(cursor, stored_value, call->env(), |
| 2753 cursor = flow_graph->AppendTo(cursor, | |
| 2754 stored_value, | |
| 2755 call->env(), | |
| 2756 FlowGraph::kValue); | 2572 FlowGraph::kValue); |
| 2757 } else if (view_cid == kTypedDataUint32ArrayCid) { | 2573 } else if (view_cid == kTypedDataUint32ArrayCid) { |
| 2758 stored_value = new(Z) UnboxUint32Instr( | 2574 stored_value = |
| 2759 new(Z) Value(stored_value), | 2575 new (Z) UnboxUint32Instr(new (Z) Value(stored_value), call->deopt_id()); |
| 2760 call->deopt_id()); | |
| 2761 ASSERT(stored_value->AsUnboxInteger()->is_truncating()); | 2576 ASSERT(stored_value->AsUnboxInteger()->is_truncating()); |
| 2762 cursor = flow_graph->AppendTo(cursor, | 2577 cursor = flow_graph->AppendTo(cursor, stored_value, call->env(), |
| 2763 stored_value, | |
| 2764 call->env(), | |
| 2765 FlowGraph::kValue); | 2578 FlowGraph::kValue); |
| 2766 } | 2579 } |
| 2767 | 2580 |
| 2768 StoreBarrierType needs_store_barrier = kNoStoreBarrier; | 2581 StoreBarrierType needs_store_barrier = kNoStoreBarrier; |
| 2769 *last = new(Z) StoreIndexedInstr(new(Z) Value(array), | 2582 *last = new (Z) StoreIndexedInstr( |
| 2770 new(Z) Value(index), | 2583 new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value), |
| 2771 new(Z) Value(stored_value), | 2584 needs_store_barrier, |
| 2772 needs_store_barrier, | 2585 1, // Index scale |
| 2773 1, // Index scale | 2586 view_cid, kUnalignedAccess, call->deopt_id(), call->token_pos()); |
| 2774 view_cid, | |
| 2775 kUnalignedAccess, | |
| 2776 call->deopt_id(), | |
| 2777 call->token_pos()); | |
| 2778 | 2587 |
| 2779 flow_graph->AppendTo(cursor, | 2588 flow_graph->AppendTo( |
| 2780 *last, | 2589 cursor, *last, |
| 2781 call->deopt_id() != Thread::kNoDeoptId ? | 2590 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 2782 call->env() : NULL, | 2591 FlowGraph::kEffect); |
| 2783 FlowGraph::kEffect); | |
| 2784 return true; | 2592 return true; |
| 2785 } | 2593 } |
| 2786 | 2594 |
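The kSmiBits >= 32 branch above is the one place where the stored-value check is conditional: adding a Smi-only check for Int32/Uint32 stores is sound only if every 32-bit value fits in a Smi. A minimal standalone sketch of that range argument, with kSmiBits treated as an assumed platform constant (the guard only relies on it being at least 32):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Assumed constant for a 64-bit configuration; the guard above only
      // relies on kSmiBits >= 32, so 32 is the interesting boundary case.
      const int kSmiBits = 32;
      const int64_t kSmiMax = (int64_t{1} << kSmiBits) - 1;  // 2^32 - 1
      const int64_t kSmiMin = -(int64_t{1} << kSmiBits);     // -2^32
      // Both the signed and the unsigned 32-bit ranges fit, so a Smi-only
      // class check on the stored value cannot spuriously deoptimize.
      assert(static_cast<int64_t>(INT32_MIN) >= kSmiMin);
      assert(static_cast<int64_t>(UINT32_MAX) <= kSmiMax);
      return 0;
    }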
| 2787 | 2595 |
| 2788 | |
| 2789 // Returns the LoadIndexedInstr. | 2596 // Returns the LoadIndexedInstr. |
| 2790 static Definition* PrepareInlineStringIndexOp( | 2597 static Definition* PrepareInlineStringIndexOp(FlowGraph* flow_graph, |
| 2791 FlowGraph* flow_graph, | 2598 Instruction* call, |
| 2792 Instruction* call, | 2599 intptr_t cid, |
| 2793 intptr_t cid, | 2600 Definition* str, |
| 2794 Definition* str, | 2601 Definition* index, |
| 2795 Definition* index, | 2602 Instruction* cursor) { |
| 2796 Instruction* cursor) { | |
| 2797 // Load the length of the string. | 2603 // Load the length of the string. |
| 2798 // Treat length loads as mutable (i.e. affected by side effects) to avoid | 2604 // Treat length loads as mutable (i.e. affected by side effects) to avoid |
| 2799 // hoisting them since we can't hoist the preceding class-check. This | 2605 // hoisting them since we can't hoist the preceding class-check. This |
| 2800 // is because of externalization of strings that affects their class-id. | 2606 // is because of externalization of strings that affects their class-id. |
| 2801 LoadFieldInstr* length = new(Z) LoadFieldInstr( | 2607 LoadFieldInstr* length = new (Z) |
| 2802 new(Z) Value(str), | 2608 LoadFieldInstr(new (Z) Value(str), String::length_offset(), |
| 2803 String::length_offset(), | 2609 Type::ZoneHandle(Z, Type::SmiType()), str->token_pos()); |
| 2804 Type::ZoneHandle(Z, Type::SmiType()), | |
| 2805 str->token_pos()); | |
| 2806 length->set_result_cid(kSmiCid); | 2610 length->set_result_cid(kSmiCid); |
| 2807 length->set_is_immutable(!FLAG_support_externalizable_strings); | 2611 length->set_is_immutable(!FLAG_support_externalizable_strings); |
| 2808 length->set_recognized_kind(MethodRecognizer::kStringBaseLength); | 2612 length->set_recognized_kind(MethodRecognizer::kStringBaseLength); |
| 2809 | 2613 |
| 2810 cursor = flow_graph->AppendTo(cursor, length, NULL, FlowGraph::kValue); | 2614 cursor = flow_graph->AppendTo(cursor, length, NULL, FlowGraph::kValue); |
| 2811 // Bounds check. | 2615 // Bounds check. |
| 2812 cursor = flow_graph->AppendTo(cursor, | 2616 cursor = flow_graph->AppendTo( |
| 2813 new(Z) CheckArrayBoundInstr( | 2617 cursor, |
| 2814 new(Z) Value(length), | 2618 new (Z) CheckArrayBoundInstr(new (Z) Value(length), new (Z) Value(index), |
| 2815 new(Z) Value(index), | 2619 call->deopt_id()), |
| 2816 call->deopt_id()), | 2620 call->env(), FlowGraph::kEffect); |
| 2817 call->env(), | |
| 2818 FlowGraph::kEffect); | |
| 2819 | 2621 |
| 2820 // For external strings: Load backing store. | 2622 // For external strings: Load backing store. |
| 2821 if (cid == kExternalOneByteStringCid) { | 2623 if (cid == kExternalOneByteStringCid) { |
| 2822 str = new LoadUntaggedInstr(new Value(str), | 2624 str = new LoadUntaggedInstr(new Value(str), |
| 2823 ExternalOneByteString::external_data_offset()); | 2625 ExternalOneByteString::external_data_offset()); |
| 2824 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); | 2626 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); |
| 2825 str = new LoadUntaggedInstr( | 2627 str = new LoadUntaggedInstr( |
| 2826 new Value(str), | 2628 new Value(str), RawExternalOneByteString::ExternalData::data_offset()); |
| 2827 RawExternalOneByteString::ExternalData::data_offset()); | |
| 2828 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); | 2629 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); |
| 2829 } else if (cid == kExternalTwoByteStringCid) { | 2630 } else if (cid == kExternalTwoByteStringCid) { |
| 2830 str = new LoadUntaggedInstr(new Value(str), | 2631 str = new LoadUntaggedInstr(new Value(str), |
| 2831 ExternalTwoByteString::external_data_offset()); | 2632 ExternalTwoByteString::external_data_offset()); |
| 2832 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); | 2633 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); |
| 2833 str = new LoadUntaggedInstr( | 2634 str = new LoadUntaggedInstr( |
| 2834 new Value(str), | 2635 new Value(str), RawExternalTwoByteString::ExternalData::data_offset()); |
| 2835 RawExternalTwoByteString::ExternalData::data_offset()); | |
| 2836 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); | 2636 cursor = flow_graph->AppendTo(cursor, str, NULL, FlowGraph::kValue); |
| 2837 } | 2637 } |
| 2838 | 2638 |
| 2839 LoadIndexedInstr* load_indexed = new(Z) LoadIndexedInstr( | 2639 LoadIndexedInstr* load_indexed = new (Z) LoadIndexedInstr( |
| 2840 new(Z) Value(str), | 2640 new (Z) Value(str), new (Z) Value(index), Instance::ElementSizeFor(cid), |
| 2841 new(Z) Value(index), | 2641 cid, kAlignedAccess, Thread::kNoDeoptId, call->token_pos()); |
| 2842 Instance::ElementSizeFor(cid), | |
| 2843 cid, | |
| 2844 kAlignedAccess, | |
| 2845 Thread::kNoDeoptId, | |
| 2846 call->token_pos()); | |
| 2847 | 2642 |
| 2848 cursor = flow_graph->AppendTo(cursor, load_indexed, NULL, FlowGraph::kValue); | 2643 cursor = flow_graph->AppendTo(cursor, load_indexed, NULL, FlowGraph::kValue); |
| 2849 ASSERT(cursor == load_indexed); | 2644 ASSERT(cursor == load_indexed); |
| 2850 return load_indexed; | 2645 return load_indexed; |
| 2851 } | 2646 } |
| 2852 | 2647 |
| 2853 | 2648 |
| 2854 static bool InlineStringBaseCharAt( | 2649 static bool InlineStringBaseCharAt(FlowGraph* flow_graph, |
| 2855 FlowGraph* flow_graph, | 2650 Instruction* call, |
| 2856 Instruction* call, | 2651 intptr_t cid, |
| 2857 intptr_t cid, | 2652 TargetEntryInstr** entry, |
| 2858 TargetEntryInstr** entry, | 2653 Definition** last) { |
| 2859 Definition** last) { | |
| 2860 if ((cid != kOneByteStringCid) && (cid != kExternalOneByteStringCid)) { | 2654 if ((cid != kOneByteStringCid) && (cid != kExternalOneByteStringCid)) { |
| 2861 return false; | 2655 return false; |
| 2862 } | 2656 } |
| 2863 Definition* str = call->ArgumentAt(0); | 2657 Definition* str = call->ArgumentAt(0); |
| 2864 Definition* index = call->ArgumentAt(1); | 2658 Definition* index = call->ArgumentAt(1); |
| 2865 | 2659 |
| 2866 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2660 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2867 call->GetBlock()->try_index()); | 2661 call->GetBlock()->try_index()); |
| 2868 (*entry)->InheritDeoptTarget(Z, call); | 2662 (*entry)->InheritDeoptTarget(Z, call); |
| 2869 | 2663 |
| 2870 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry); | 2664 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry); |
| 2871 | 2665 |
| 2872 OneByteStringFromCharCodeInstr* char_at = | 2666 OneByteStringFromCharCodeInstr* char_at = |
| 2873 new(Z) OneByteStringFromCharCodeInstr(new(Z) Value(*last)); | 2667 new (Z) OneByteStringFromCharCodeInstr(new (Z) Value(*last)); |
| 2874 | 2668 |
| 2875 flow_graph->AppendTo(*last, char_at, NULL, FlowGraph::kValue); | 2669 flow_graph->AppendTo(*last, char_at, NULL, FlowGraph::kValue); |
| 2876 *last = char_at; | 2670 *last = char_at; |
| 2877 | 2671 |
| 2878 return true; | 2672 return true; |
| 2879 } | 2673 } |
| 2880 | 2674 |
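The two string helpers above expand an indexed read into a bounds check, a raw code-unit load, and (for charAt) re-boxing the unit as a one-character string, which is why only one-byte string cids are accepted there. A rough standalone analogue of that shape, with hypothetical names rather than VM API, assuming the one-byte case:

    #include <cstdint>
    #include <cstdlib>
    #include <string>

    // Illustrative sketch of the inlined one-byte String charAt sequence.
    std::string CharAtOneByte(const uint8_t* data, intptr_t length, intptr_t index) {
      if (index < 0 || index >= length) std::abort();       // CheckArrayBoundInstr
                                                            // (the VM deoptimizes instead).
      const uint8_t code_unit = data[index];                // LoadIndexedInstr, 1-byte element.
      return std::string(1, static_cast<char>(code_unit));  // OneByteStringFromCharCodeInstr.
    }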
| 2881 | 2675 |
| 2882 static bool InlineStringCodeUnitAt( | 2676 static bool InlineStringCodeUnitAt(FlowGraph* flow_graph, |
| 2883 FlowGraph* flow_graph, | 2677 Instruction* call, |
| 2884 Instruction* call, | 2678 intptr_t cid, |
| 2885 intptr_t cid, | 2679 TargetEntryInstr** entry, |
| 2886 TargetEntryInstr** entry, | 2680 Definition** last) { |
| 2887 Definition** last) { | 2681 ASSERT((cid == kOneByteStringCid) || (cid == kTwoByteStringCid) || |
| 2888 ASSERT((cid == kOneByteStringCid) || | |
| 2889 (cid == kTwoByteStringCid) || | |
| 2890 (cid == kExternalOneByteStringCid) || | 2682 (cid == kExternalOneByteStringCid) || |
| 2891 (cid == kExternalTwoByteStringCid)); | 2683 (cid == kExternalTwoByteStringCid)); |
| 2892 Definition* str = call->ArgumentAt(0); | 2684 Definition* str = call->ArgumentAt(0); |
| 2893 Definition* index = call->ArgumentAt(1); | 2685 Definition* index = call->ArgumentAt(1); |
| 2894 | 2686 |
| 2895 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2687 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 2896 call->GetBlock()->try_index()); | 2688 call->GetBlock()->try_index()); |
| 2897 (*entry)->InheritDeoptTarget(Z, call); | 2689 (*entry)->InheritDeoptTarget(Z, call); |
| 2898 | 2690 |
| 2899 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry); | 2691 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry); |
| 2900 | 2692 |
| 2901 return true; | 2693 return true; |
| 2902 } | 2694 } |
| 2903 | 2695 |
| 2904 | 2696 |
| 2905 bool FlowGraphInliner::TryReplaceInstanceCallWithInline( | 2697 bool FlowGraphInliner::TryReplaceInstanceCallWithInline( |
| 2906 FlowGraph* flow_graph, | 2698 FlowGraph* flow_graph, |
| 2907 ForwardInstructionIterator* iterator, | 2699 ForwardInstructionIterator* iterator, |
| 2908 InstanceCallInstr* call) { | 2700 InstanceCallInstr* call) { |
| 2909 Function& target = Function::Handle(Z); | 2701 Function& target = Function::Handle(Z); |
| 2910 GrowableArray<intptr_t> class_ids; | 2702 GrowableArray<intptr_t> class_ids; |
| 2911 call->ic_data()->GetCheckAt(0, &class_ids, &target); | 2703 call->ic_data()->GetCheckAt(0, &class_ids, &target); |
| 2912 const intptr_t receiver_cid = class_ids[0]; | 2704 const intptr_t receiver_cid = class_ids[0]; |
| 2913 | 2705 |
| 2914 TargetEntryInstr* entry; | 2706 TargetEntryInstr* entry; |
| 2915 Definition* last; | 2707 Definition* last; |
| 2916 if (FlowGraphInliner::TryInlineRecognizedMethod(flow_graph, | 2708 if (FlowGraphInliner::TryInlineRecognizedMethod( |
| 2917 receiver_cid, | 2709 flow_graph, receiver_cid, target, call, call->ArgumentAt(0), |
| 2918 target, | 2710 call->token_pos(), *call->ic_data(), &entry, &last)) { |
| 2919 call, | |
| 2920 call->ArgumentAt(0), | |
| 2921 call->token_pos(), | |
| 2922 *call->ic_data(), | |
| 2923 &entry, &last)) { | |
| 2924 // Insert receiver class check if needed. | 2711 // Insert receiver class check if needed. |
| 2925 if (MethodRecognizer::PolymorphicTarget(target) || | 2712 if (MethodRecognizer::PolymorphicTarget(target) || |
| 2926 flow_graph->InstanceCallNeedsClassCheck(call, target.kind())) { | 2713 flow_graph->InstanceCallNeedsClassCheck(call, target.kind())) { |
| 2927 Instruction* check = GetCheckClass( | 2714 Instruction* check = GetCheckClass( |
| 2928 flow_graph, | 2715 flow_graph, call->ArgumentAt(0), |
| 2929 call->ArgumentAt(0), | |
| 2930 ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks()), | 2716 ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks()), |
| 2931 call->deopt_id(), | 2717 call->deopt_id(), call->token_pos()); |
| 2932 call->token_pos()); | |
| 2933 flow_graph->InsertBefore(call, check, call->env(), FlowGraph::kEffect); | 2718 flow_graph->InsertBefore(call, check, call->env(), FlowGraph::kEffect); |
| 2934 } | 2719 } |
| 2935 | 2720 |
| 2936 // Remove the original push arguments. | 2721 // Remove the original push arguments. |
| 2937 for (intptr_t i = 0; i < call->ArgumentCount(); ++i) { | 2722 for (intptr_t i = 0; i < call->ArgumentCount(); ++i) { |
| 2938 PushArgumentInstr* push = call->PushArgumentAt(i); | 2723 PushArgumentInstr* push = call->PushArgumentAt(i); |
| 2939 push->ReplaceUsesWith(push->value()->definition()); | 2724 push->ReplaceUsesWith(push->value()->definition()); |
| 2940 push->RemoveFromGraph(); | 2725 push->RemoveFromGraph(); |
| 2941 } | 2726 } |
| 2942 // Replace all uses of this definition with the result. | 2727 // Replace all uses of this definition with the result. |
| (...skipping 19 matching lines...) |

| 2962 return false; | 2747 return false; |
| 2963 } | 2748 } |
| 2964 | 2749 |
| 2965 | 2750 |
| 2966 bool FlowGraphInliner::TryReplaceStaticCallWithInline( | 2751 bool FlowGraphInliner::TryReplaceStaticCallWithInline( |
| 2967 FlowGraph* flow_graph, | 2752 FlowGraph* flow_graph, |
| 2968 ForwardInstructionIterator* iterator, | 2753 ForwardInstructionIterator* iterator, |
| 2969 StaticCallInstr* call) { | 2754 StaticCallInstr* call) { |
| 2970 TargetEntryInstr* entry; | 2755 TargetEntryInstr* entry; |
| 2971 Definition* last; | 2756 Definition* last; |
| 2972 if (FlowGraphInliner::TryInlineRecognizedMethod(flow_graph, | 2757 if (FlowGraphInliner::TryInlineRecognizedMethod( |
| 2973 kIllegalCid, | 2758 flow_graph, kIllegalCid, call->function(), call, call->ArgumentAt(0), |
| 2974 call->function(), | 2759 call->token_pos(), *call->ic_data(), &entry, &last)) { |
| 2975 call, | |
| 2976 call->ArgumentAt(0), | |
| 2977 call->token_pos(), | |
| 2978 *call->ic_data(), | |
| 2979 &entry, &last)) { | |
| 2980 // Remove the original push arguments. | 2760 // Remove the original push arguments. |
| 2981 for (intptr_t i = 0; i < call->ArgumentCount(); ++i) { | 2761 for (intptr_t i = 0; i < call->ArgumentCount(); ++i) { |
| 2982 PushArgumentInstr* push = call->PushArgumentAt(i); | 2762 PushArgumentInstr* push = call->PushArgumentAt(i); |
| 2983 push->ReplaceUsesWith(push->value()->definition()); | 2763 push->ReplaceUsesWith(push->value()->definition()); |
| 2984 push->RemoveFromGraph(); | 2764 push->RemoveFromGraph(); |
| 2985 } | 2765 } |
| 2986 // Replace all uses of this definition with the result. | 2766 // Replace all uses of this definition with the result. |
| 2987 if (call->HasUses()) { | 2767 if (call->HasUses()) { |
| 2988 call->ReplaceUsesWith(last); | 2768 call->ReplaceUsesWith(last); |
| 2989 } | 2769 } |
| (...skipping 18 matching lines...) |
| 3008 | 2788 |
| 3009 | 2789 |
| 3010 static bool InlineFloat32x4Method(FlowGraph* flow_graph, | 2790 static bool InlineFloat32x4Method(FlowGraph* flow_graph, |
| 3011 Instruction* call, | 2791 Instruction* call, |
| 3012 MethodRecognizer::Kind kind, | 2792 MethodRecognizer::Kind kind, |
| 3013 TargetEntryInstr** entry, | 2793 TargetEntryInstr** entry, |
| 3014 Definition** last) { | 2794 Definition** last) { |
| 3015 if (!ShouldInlineSimd()) { | 2795 if (!ShouldInlineSimd()) { |
| 3016 return false; | 2796 return false; |
| 3017 } | 2797 } |
| 3018 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2798 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3019 call->GetBlock()->try_index()); | 2799 call->GetBlock()->try_index()); |
| 3020 (*entry)->InheritDeoptTarget(Z, call); | 2800 (*entry)->InheritDeoptTarget(Z, call); |
| 3021 Instruction* cursor = *entry; | 2801 Instruction* cursor = *entry; |
| 3022 switch (kind) { | 2802 switch (kind) { |
| 3023 case MethodRecognizer::kFloat32x4ShuffleX: | 2803 case MethodRecognizer::kFloat32x4ShuffleX: |
| 3024 case MethodRecognizer::kFloat32x4ShuffleY: | 2804 case MethodRecognizer::kFloat32x4ShuffleY: |
| 3025 case MethodRecognizer::kFloat32x4ShuffleZ: | 2805 case MethodRecognizer::kFloat32x4ShuffleZ: |
| 3026 case MethodRecognizer::kFloat32x4ShuffleW: { | 2806 case MethodRecognizer::kFloat32x4ShuffleW: { |
| 3027 *last = new(Z) Simd32x4ShuffleInstr(kind, | 2807 *last = |
| 3028 new(Z) Value(call->ArgumentAt(0)), | 2808 new (Z) Simd32x4ShuffleInstr(kind, new (Z) Value(call->ArgumentAt(0)), |
| 3029 0, // mask ignored. | 2809 0, // mask ignored. |
| 3030 call->deopt_id()); | 2810 call->deopt_id()); |
| 3031 break; | 2811 break; |
| 3032 } | 2812 } |
| 3033 case MethodRecognizer::kFloat32x4GetSignMask: { | 2813 case MethodRecognizer::kFloat32x4GetSignMask: { |
| 3034 *last = new(Z) Simd32x4GetSignMaskInstr(kind, | 2814 *last = new (Z) Simd32x4GetSignMaskInstr( |
| 3035 new(Z) Value(call->ArgumentAt(0)), | 2815 kind, new (Z) Value(call->ArgumentAt(0)), call->deopt_id()); |
| 3036 call->deopt_id()); | |
| 3037 break; | 2816 break; |
| 3038 } | 2817 } |
| 3039 case MethodRecognizer::kFloat32x4Equal: | 2818 case MethodRecognizer::kFloat32x4Equal: |
| 3040 case MethodRecognizer::kFloat32x4GreaterThan: | 2819 case MethodRecognizer::kFloat32x4GreaterThan: |
| 3041 case MethodRecognizer::kFloat32x4GreaterThanOrEqual: | 2820 case MethodRecognizer::kFloat32x4GreaterThanOrEqual: |
| 3042 case MethodRecognizer::kFloat32x4LessThan: | 2821 case MethodRecognizer::kFloat32x4LessThan: |
| 3043 case MethodRecognizer::kFloat32x4LessThanOrEqual: | 2822 case MethodRecognizer::kFloat32x4LessThanOrEqual: |
| 3044 case MethodRecognizer::kFloat32x4NotEqual: { | 2823 case MethodRecognizer::kFloat32x4NotEqual: { |
| 3045 Definition* left = call->ArgumentAt(0); | 2824 Definition* left = call->ArgumentAt(0); |
| 3046 Definition* right = call->ArgumentAt(1); | 2825 Definition* right = call->ArgumentAt(1); |
| 3047 *last = new(Z) Float32x4ComparisonInstr(kind, | 2826 *last = new (Z) Float32x4ComparisonInstr( |
| 3048 new(Z) Value(left), | 2827 kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id()); |
| 3049 new(Z) Value(right), | |
| 3050 call->deopt_id()); | |
| 3051 break; | 2828 break; |
| 3052 } | 2829 } |
| 3053 case MethodRecognizer::kFloat32x4Min: | 2830 case MethodRecognizer::kFloat32x4Min: |
| 3054 case MethodRecognizer::kFloat32x4Max: { | 2831 case MethodRecognizer::kFloat32x4Max: { |
| 3055 Definition* left = call->ArgumentAt(0); | 2832 Definition* left = call->ArgumentAt(0); |
| 3056 Definition* right = call->ArgumentAt(1); | 2833 Definition* right = call->ArgumentAt(1); |
| 3057 *last = new(Z) Float32x4MinMaxInstr(kind, | 2834 *last = new (Z) Float32x4MinMaxInstr( |
| 3058 new(Z) Value(left), | 2835 kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id()); |
| 3059 new(Z) Value(right), | |
| 3060 call->deopt_id()); | |
| 3061 break; | 2836 break; |
| 3062 } | 2837 } |
| 3063 case MethodRecognizer::kFloat32x4Scale: { | 2838 case MethodRecognizer::kFloat32x4Scale: { |
| 3064 Definition* left = call->ArgumentAt(0); | 2839 Definition* left = call->ArgumentAt(0); |
| 3065 Definition* right = call->ArgumentAt(1); | 2840 Definition* right = call->ArgumentAt(1); |
| 3066 // Left and right values are swapped when handed to the instruction, | 2841 // Left and right values are swapped when handed to the instruction, |
| 3067 // this is done so that the double value is loaded into the output | 2842 // this is done so that the double value is loaded into the output |
| 3068 // register and can be destroyed. | 2843 // register and can be destroyed. |
| 3069 *last = new(Z) Float32x4ScaleInstr(kind, | 2844 *last = new (Z) Float32x4ScaleInstr( |
| 3070 new(Z) Value(right), | 2845 kind, new (Z) Value(right), new (Z) Value(left), call->deopt_id()); |
| 3071 new(Z) Value(left), | |
| 3072 call->deopt_id()); | |
| 3073 break; | 2846 break; |
| 3074 } | 2847 } |
| 3075 case MethodRecognizer::kFloat32x4Sqrt: | 2848 case MethodRecognizer::kFloat32x4Sqrt: |
| 3076 case MethodRecognizer::kFloat32x4ReciprocalSqrt: | 2849 case MethodRecognizer::kFloat32x4ReciprocalSqrt: |
| 3077 case MethodRecognizer::kFloat32x4Reciprocal: { | 2850 case MethodRecognizer::kFloat32x4Reciprocal: { |
| 3078 Definition* left = call->ArgumentAt(0); | 2851 Definition* left = call->ArgumentAt(0); |
| 3079 *last = new(Z) Float32x4SqrtInstr(kind, | 2852 *last = new (Z) |
| 3080 new(Z) Value(left), | 2853 Float32x4SqrtInstr(kind, new (Z) Value(left), call->deopt_id()); |
| 3081 call->deopt_id()); | |
| 3082 break; | 2854 break; |
| 3083 } | 2855 } |
| 3084 case MethodRecognizer::kFloat32x4WithX: | 2856 case MethodRecognizer::kFloat32x4WithX: |
| 3085 case MethodRecognizer::kFloat32x4WithY: | 2857 case MethodRecognizer::kFloat32x4WithY: |
| 3086 case MethodRecognizer::kFloat32x4WithZ: | 2858 case MethodRecognizer::kFloat32x4WithZ: |
| 3087 case MethodRecognizer::kFloat32x4WithW: { | 2859 case MethodRecognizer::kFloat32x4WithW: { |
| 3088 Definition* left = call->ArgumentAt(0); | 2860 Definition* left = call->ArgumentAt(0); |
| 3089 Definition* right = call->ArgumentAt(1); | 2861 Definition* right = call->ArgumentAt(1); |
| 3090 *last = new(Z) Float32x4WithInstr(kind, | 2862 *last = new (Z) Float32x4WithInstr( |
| 3091 new(Z) Value(left), | 2863 kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id()); |
| 3092 new(Z) Value(right), | |
| 3093 call->deopt_id()); | |
| 3094 break; | 2864 break; |
| 3095 } | 2865 } |
| 3096 case MethodRecognizer::kFloat32x4Absolute: | 2866 case MethodRecognizer::kFloat32x4Absolute: |
| 3097 case MethodRecognizer::kFloat32x4Negate: { | 2867 case MethodRecognizer::kFloat32x4Negate: { |
| 3098 Definition* left = call->ArgumentAt(0); | 2868 Definition* left = call->ArgumentAt(0); |
| 3099 *last = new(Z) Float32x4ZeroArgInstr(kind, | 2869 *last = new (Z) |
| 3100 new(Z) Value(left), | 2870 Float32x4ZeroArgInstr(kind, new (Z) Value(left), call->deopt_id()); |
| 3101 call->deopt_id()); | |
| 3102 break; | 2871 break; |
| 3103 } | 2872 } |
| 3104 case MethodRecognizer::kFloat32x4Clamp: { | 2873 case MethodRecognizer::kFloat32x4Clamp: { |
| 3105 Definition* left = call->ArgumentAt(0); | 2874 Definition* left = call->ArgumentAt(0); |
| 3106 Definition* lower = call->ArgumentAt(1); | 2875 Definition* lower = call->ArgumentAt(1); |
| 3107 Definition* upper = call->ArgumentAt(2); | 2876 Definition* upper = call->ArgumentAt(2); |
| 3108 *last = new(Z) Float32x4ClampInstr( | 2877 *last = |
| 3109 new(Z) Value(left), | 2878 new (Z) Float32x4ClampInstr(new (Z) Value(left), new (Z) Value(lower), |
| 3110 new(Z) Value(lower), | 2879 new (Z) Value(upper), call->deopt_id()); |
| 3111 new(Z) Value(upper), | |
| 3112 call->deopt_id()); | |
| 3113 break; | 2880 break; |
| 3114 } | 2881 } |
| 3115 default: | 2882 default: |
| 3116 UNREACHABLE(); | 2883 UNREACHABLE(); |
| 3117 return false; | 2884 return false; |
| 3118 } | 2885 } |
| 3119 flow_graph->AppendTo(cursor, *last, | 2886 flow_graph->AppendTo( |
| 3120 call->deopt_id() != Thread::kNoDeoptId ? | 2887 cursor, *last, |
| 3121 call->env() : NULL, | 2888 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3122 FlowGraph::kValue); | 2889 FlowGraph::kValue); |
| 3123 return true; | 2890 return true; |
| 3124 } | 2891 } |
| 3125 | 2892 |
| 3126 | 2893 |
| 3127 static bool CheckMask(Definition* definition, intptr_t* mask_ptr) { | 2894 static bool CheckMask(Definition* definition, intptr_t* mask_ptr) { |
| 3128 if (!definition->IsConstant()) return false; | 2895 if (!definition->IsConstant()) return false; |
| 3129 ConstantInstr* constant_instruction = definition->AsConstant(); | 2896 ConstantInstr* constant_instruction = definition->AsConstant(); |
| 3130 const Object& constant_mask = constant_instruction->value(); | 2897 const Object& constant_mask = constant_instruction->value(); |
| 3131 if (!constant_mask.IsSmi()) return false; | 2898 if (!constant_mask.IsSmi()) return false; |
| 3132 const intptr_t mask = Smi::Cast(constant_mask).Value(); | 2899 const intptr_t mask = Smi::Cast(constant_mask).Value(); |
| 3133 if ((mask < 0) || (mask > 255)) { | 2900 if ((mask < 0) || (mask > 255)) { |
| 3134 return false; // Not a valid mask. | 2901 return false; // Not a valid mask. |
| 3135 } | 2902 } |
| 3136 *mask_ptr = mask; | 2903 *mask_ptr = mask; |
| 3137 return true; | 2904 return true; |
| 3138 } | 2905 } |
| 3139 | 2906 |
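CheckMask above only verifies that the shuffle mask is a compile-time Smi constant in 0..255. That range matches the usual packed encoding of two source-lane bits per destination lane (as in SSE SHUFPS); the decoder below is an illustrative assumption about that layout, not code from this file:

    #include <cstdio>

    int main() {
      // 0x1B selects source lanes 3, 2, 1, 0 for destination lanes 0..3,
      // i.e. a full lane reversal under the assumed 2-bits-per-lane encoding.
      const int mask = 0x1B;
      for (int lane = 0; lane < 4; ++lane) {
        std::printf("dest lane %d <- source lane %d\n", lane, (mask >> (2 * lane)) & 3);
      }
      return 0;
    }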
| 3140 | 2907 |
| 3141 static bool InlineSimdShuffleMethod(FlowGraph* flow_graph, | 2908 static bool InlineSimdShuffleMethod(FlowGraph* flow_graph, |
| 3142 Instruction* call, | 2909 Instruction* call, |
| 3143 MethodRecognizer::Kind kind, | 2910 MethodRecognizer::Kind kind, |
| 3144 TargetEntryInstr** entry, | 2911 TargetEntryInstr** entry, |
| 3145 Definition** last) { | 2912 Definition** last) { |
| 3146 if (!ShouldInlineSimd()) { | 2913 if (!ShouldInlineSimd()) { |
| 3147 return false; | 2914 return false; |
| 3148 } | 2915 } |
| 3149 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2916 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3150 call->GetBlock()->try_index()); | 2917 call->GetBlock()->try_index()); |
| 3151 (*entry)->InheritDeoptTarget(Z, call); | 2918 (*entry)->InheritDeoptTarget(Z, call); |
| 3152 Instruction* cursor = *entry; | 2919 Instruction* cursor = *entry; |
| 3153 Definition* mask_definition = call->ArgumentAt(1); | 2920 Definition* mask_definition = call->ArgumentAt(1); |
| 3154 intptr_t mask = 0; | 2921 intptr_t mask = 0; |
| 3155 if (!CheckMask(mask_definition, &mask)) { | 2922 if (!CheckMask(mask_definition, &mask)) { |
| 3156 return false; | 2923 return false; |
| 3157 } | 2924 } |
| 3158 *last = new(Z) Simd32x4ShuffleInstr( | 2925 *last = new (Z) Simd32x4ShuffleInstr(kind, new (Z) Value(call->ArgumentAt(0)), |
| 3159 kind, | 2926 mask, call->deopt_id()); |
| 3160 new(Z) Value(call->ArgumentAt(0)), | 2927 flow_graph->AppendTo( |
| 3161 mask, | 2928 cursor, *last, |
| 3162 call->deopt_id()); | 2929 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3163 flow_graph->AppendTo(cursor, *last, | 2930 FlowGraph::kValue); |
| 3164 call->deopt_id() != Thread::kNoDeoptId ? | |
| 3165 call->env() : NULL, | |
| 3166 FlowGraph::kValue); | |
| 3167 return true; | 2931 return true; |
| 3168 } | 2932 } |
| 3169 | 2933 |
| 3170 | 2934 |
| 3171 static bool InlineSimdShuffleMixMethod(FlowGraph* flow_graph, | 2935 static bool InlineSimdShuffleMixMethod(FlowGraph* flow_graph, |
| 3172 Instruction* call, | 2936 Instruction* call, |
| 3173 MethodRecognizer::Kind kind, | 2937 MethodRecognizer::Kind kind, |
| 3174 TargetEntryInstr** entry, | 2938 TargetEntryInstr** entry, |
| 3175 Definition** last) { | 2939 Definition** last) { |
| 3176 if (!ShouldInlineSimd()) { | 2940 if (!ShouldInlineSimd()) { |
| 3177 return false; | 2941 return false; |
| 3178 } | 2942 } |
| 3179 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2943 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3180 call->GetBlock()->try_index()); | 2944 call->GetBlock()->try_index()); |
| 3181 (*entry)->InheritDeoptTarget(Z, call); | 2945 (*entry)->InheritDeoptTarget(Z, call); |
| 3182 Instruction* cursor = *entry; | 2946 Instruction* cursor = *entry; |
| 3183 Definition* mask_definition = call->ArgumentAt(2); | 2947 Definition* mask_definition = call->ArgumentAt(2); |
| 3184 intptr_t mask = 0; | 2948 intptr_t mask = 0; |
| 3185 if (!CheckMask(mask_definition, &mask)) { | 2949 if (!CheckMask(mask_definition, &mask)) { |
| 3186 return false; | 2950 return false; |
| 3187 } | 2951 } |
| 3188 *last = new(Z) Simd32x4ShuffleMixInstr( | 2952 *last = new (Z) Simd32x4ShuffleMixInstr( |
| 3189 kind, | 2953 kind, new (Z) Value(call->ArgumentAt(0)), |
| 3190 new(Z) Value(call->ArgumentAt(0)), | 2954 new (Z) Value(call->ArgumentAt(1)), mask, call->deopt_id()); |
| 3191 new(Z) Value(call->ArgumentAt(1)), | 2955 flow_graph->AppendTo( |
| 3192 mask, | 2956 cursor, *last, |
| 3193 call->deopt_id()); | 2957 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3194 flow_graph->AppendTo(cursor, *last, | 2958 FlowGraph::kValue); |
| 3195 call->deopt_id() != Thread::kNoDeoptId ? | |
| 3196 call->env() : NULL, | |
| 3197 FlowGraph::kValue); | |
| 3198 return true; | 2959 return true; |
| 3199 } | 2960 } |
| 3200 | 2961 |
| 3201 | 2962 |
| 3202 | |
| 3203 static bool InlineInt32x4Method(FlowGraph* flow_graph, | 2963 static bool InlineInt32x4Method(FlowGraph* flow_graph, |
| 3204 Instruction* call, | 2964 Instruction* call, |
| 3205 MethodRecognizer::Kind kind, | 2965 MethodRecognizer::Kind kind, |
| 3206 TargetEntryInstr** entry, | 2966 TargetEntryInstr** entry, |
| 3207 Definition** last) { | 2967 Definition** last) { |
| 3208 if (!ShouldInlineSimd()) { | 2968 if (!ShouldInlineSimd()) { |
| 3209 return false; | 2969 return false; |
| 3210 } | 2970 } |
| 3211 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 2971 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3212 call->GetBlock()->try_index()); | 2972 call->GetBlock()->try_index()); |
| 3213 (*entry)->InheritDeoptTarget(Z, call); | 2973 (*entry)->InheritDeoptTarget(Z, call); |
| 3214 Instruction* cursor = *entry; | 2974 Instruction* cursor = *entry; |
| 3215 switch (kind) { | 2975 switch (kind) { |
| 3216 case MethodRecognizer::kInt32x4GetFlagX: | 2976 case MethodRecognizer::kInt32x4GetFlagX: |
| 3217 case MethodRecognizer::kInt32x4GetFlagY: | 2977 case MethodRecognizer::kInt32x4GetFlagY: |
| 3218 case MethodRecognizer::kInt32x4GetFlagZ: | 2978 case MethodRecognizer::kInt32x4GetFlagZ: |
| 3219 case MethodRecognizer::kInt32x4GetFlagW: { | 2979 case MethodRecognizer::kInt32x4GetFlagW: { |
| 3220 *last = new(Z) Int32x4GetFlagInstr( | 2980 *last = new (Z) Int32x4GetFlagInstr( |
| 3221 kind, | 2981 kind, new (Z) Value(call->ArgumentAt(0)), call->deopt_id()); |
| 3222 new(Z) Value(call->ArgumentAt(0)), | |
| 3223 call->deopt_id()); | |
| 3224 break; | 2982 break; |
| 3225 } | 2983 } |
| 3226 case MethodRecognizer::kInt32x4GetSignMask: { | 2984 case MethodRecognizer::kInt32x4GetSignMask: { |
| 3227 *last = new(Z) Simd32x4GetSignMaskInstr( | 2985 *last = new (Z) Simd32x4GetSignMaskInstr( |
| 3228 kind, | 2986 kind, new (Z) Value(call->ArgumentAt(0)), call->deopt_id()); |
| 3229 new(Z) Value(call->ArgumentAt(0)), | |
| 3230 call->deopt_id()); | |
| 3231 break; | 2987 break; |
| 3232 } | 2988 } |
| 3233 case MethodRecognizer::kInt32x4Select: { | 2989 case MethodRecognizer::kInt32x4Select: { |
| 3234 Definition* mask = call->ArgumentAt(0); | 2990 Definition* mask = call->ArgumentAt(0); |
| 3235 Definition* trueValue = call->ArgumentAt(1); | 2991 Definition* trueValue = call->ArgumentAt(1); |
| 3236 Definition* falseValue = call->ArgumentAt(2); | 2992 Definition* falseValue = call->ArgumentAt(2); |
| 3237 *last = new(Z) Int32x4SelectInstr( | 2993 *last = new (Z) |
| 3238 new(Z) Value(mask), | 2994 Int32x4SelectInstr(new (Z) Value(mask), new (Z) Value(trueValue), |
| 3239 new(Z) Value(trueValue), | 2995 new (Z) Value(falseValue), call->deopt_id()); |
| 3240 new(Z) Value(falseValue), | |
| 3241 call->deopt_id()); | |
| 3242 break; | 2996 break; |
| 3243 } | 2997 } |
| 3244 case MethodRecognizer::kInt32x4WithFlagX: | 2998 case MethodRecognizer::kInt32x4WithFlagX: |
| 3245 case MethodRecognizer::kInt32x4WithFlagY: | 2999 case MethodRecognizer::kInt32x4WithFlagY: |
| 3246 case MethodRecognizer::kInt32x4WithFlagZ: | 3000 case MethodRecognizer::kInt32x4WithFlagZ: |
| 3247 case MethodRecognizer::kInt32x4WithFlagW: { | 3001 case MethodRecognizer::kInt32x4WithFlagW: { |
| 3248 *last = new(Z) Int32x4SetFlagInstr( | 3002 *last = new (Z) Int32x4SetFlagInstr( |
| 3249 kind, | 3003 kind, new (Z) Value(call->ArgumentAt(0)), |
| 3250 new(Z) Value(call->ArgumentAt(0)), | 3004 new (Z) Value(call->ArgumentAt(1)), call->deopt_id()); |
| 3251 new(Z) Value(call->ArgumentAt(1)), | |
| 3252 call->deopt_id()); | |
| 3253 break; | 3005 break; |
| 3254 } | 3006 } |
| 3255 default: | 3007 default: |
| 3256 return false; | 3008 return false; |
| 3257 } | 3009 } |
| 3258 flow_graph->AppendTo(cursor, *last, | 3010 flow_graph->AppendTo( |
| 3259 call->deopt_id() != Thread::kNoDeoptId ? | 3011 cursor, *last, |
| 3260 call->env() : NULL, | 3012 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3261 FlowGraph::kValue); | 3013 FlowGraph::kValue); |
| 3262 return true; | 3014 return true; |
| 3263 } | 3015 } |
| 3264 | 3016 |
| 3265 | 3017 |
| 3266 static bool InlineFloat64x2Method(FlowGraph* flow_graph, | 3018 static bool InlineFloat64x2Method(FlowGraph* flow_graph, |
| 3267 Instruction* call, | 3019 Instruction* call, |
| 3268 MethodRecognizer::Kind kind, | 3020 MethodRecognizer::Kind kind, |
| 3269 TargetEntryInstr** entry, | 3021 TargetEntryInstr** entry, |
| 3270 Definition** last) { | 3022 Definition** last) { |
| 3271 if (!ShouldInlineSimd()) { | 3023 if (!ShouldInlineSimd()) { |
| 3272 return false; | 3024 return false; |
| 3273 } | 3025 } |
| 3274 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3026 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3275 call->GetBlock()->try_index()); | 3027 call->GetBlock()->try_index()); |
| 3276 (*entry)->InheritDeoptTarget(Z, call); | 3028 (*entry)->InheritDeoptTarget(Z, call); |
| 3277 Instruction* cursor = *entry; | 3029 Instruction* cursor = *entry; |
| 3278 switch (kind) { | 3030 switch (kind) { |
| 3279 case MethodRecognizer::kFloat64x2GetX: | 3031 case MethodRecognizer::kFloat64x2GetX: |
| 3280 case MethodRecognizer::kFloat64x2GetY: { | 3032 case MethodRecognizer::kFloat64x2GetY: { |
| 3281 *last = new(Z) Simd64x2ShuffleInstr( | 3033 *last = |
| 3282 kind, | 3034 new (Z) Simd64x2ShuffleInstr(kind, new (Z) Value(call->ArgumentAt(0)), |
| 3283 new(Z) Value(call->ArgumentAt(0)), | 3035 0, // mask is ignored. |
| 3284 0, // mask is ignored. | 3036 call->deopt_id()); |
| 3285 call->deopt_id()); | |
| 3286 break; | 3037 break; |
| 3287 } | 3038 } |
| 3288 case MethodRecognizer::kFloat64x2Negate: | 3039 case MethodRecognizer::kFloat64x2Negate: |
| 3289 case MethodRecognizer::kFloat64x2Abs: | 3040 case MethodRecognizer::kFloat64x2Abs: |
| 3290 case MethodRecognizer::kFloat64x2Sqrt: | 3041 case MethodRecognizer::kFloat64x2Sqrt: |
| 3291 case MethodRecognizer::kFloat64x2GetSignMask: { | 3042 case MethodRecognizer::kFloat64x2GetSignMask: { |
| 3292 *last = new(Z) Float64x2ZeroArgInstr( | 3043 *last = new (Z) Float64x2ZeroArgInstr( |
| 3293 kind, new(Z) Value(call->ArgumentAt(0)), call->deopt_id()); | 3044 kind, new (Z) Value(call->ArgumentAt(0)), call->deopt_id()); |
| 3294 break; | 3045 break; |
| 3295 } | 3046 } |
| 3296 case MethodRecognizer::kFloat64x2Scale: | 3047 case MethodRecognizer::kFloat64x2Scale: |
| 3297 case MethodRecognizer::kFloat64x2WithX: | 3048 case MethodRecognizer::kFloat64x2WithX: |
| 3298 case MethodRecognizer::kFloat64x2WithY: | 3049 case MethodRecognizer::kFloat64x2WithY: |
| 3299 case MethodRecognizer::kFloat64x2Min: | 3050 case MethodRecognizer::kFloat64x2Min: |
| 3300 case MethodRecognizer::kFloat64x2Max: { | 3051 case MethodRecognizer::kFloat64x2Max: { |
| 3301 Definition* left = call->ArgumentAt(0); | 3052 Definition* left = call->ArgumentAt(0); |
| 3302 Definition* right = call->ArgumentAt(1); | 3053 Definition* right = call->ArgumentAt(1); |
| 3303 *last = new(Z) Float64x2OneArgInstr(kind, | 3054 *last = new (Z) Float64x2OneArgInstr( |
| 3304 new(Z) Value(left), | 3055 kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id()); |
| 3305 new(Z) Value(right), | |
| 3306 call->deopt_id()); | |
| 3307 break; | 3056 break; |
| 3308 } | 3057 } |
| 3309 default: | 3058 default: |
| 3310 UNREACHABLE(); | 3059 UNREACHABLE(); |
| 3311 return false; | 3060 return false; |
| 3312 } | 3061 } |
| 3313 flow_graph->AppendTo(cursor, *last, | 3062 flow_graph->AppendTo( |
| 3314 call->deopt_id() != Thread::kNoDeoptId ? | 3063 cursor, *last, |
| 3315 call->env() : NULL, | 3064 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3316 FlowGraph::kValue); | 3065 FlowGraph::kValue); |
| 3317 return true; | 3066 return true; |
| 3318 } | 3067 } |
| 3319 | 3068 |
| 3320 | 3069 |
| 3321 static bool InlineSimdConstructor(FlowGraph* flow_graph, | 3070 static bool InlineSimdConstructor(FlowGraph* flow_graph, |
| 3322 Instruction* call, | 3071 Instruction* call, |
| 3323 MethodRecognizer::Kind kind, | 3072 MethodRecognizer::Kind kind, |
| 3324 TargetEntryInstr** entry, | 3073 TargetEntryInstr** entry, |
| 3325 Definition** last) { | 3074 Definition** last) { |
| 3326 if (!ShouldInlineSimd()) { | 3075 if (!ShouldInlineSimd()) { |
| 3327 return false; | 3076 return false; |
| 3328 } | 3077 } |
| 3329 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3078 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3330 call->GetBlock()->try_index()); | 3079 call->GetBlock()->try_index()); |
| 3331 (*entry)->InheritDeoptTarget(Z, call); | 3080 (*entry)->InheritDeoptTarget(Z, call); |
| 3332 Instruction* cursor = *entry; | 3081 Instruction* cursor = *entry; |
| 3333 switch (kind) { | 3082 switch (kind) { |
| 3334 case MethodRecognizer::kFloat32x4Zero: | 3083 case MethodRecognizer::kFloat32x4Zero: |
| 3335 *last = new(Z) Float32x4ZeroInstr(); | 3084 *last = new (Z) Float32x4ZeroInstr(); |
| 3336 break; | 3085 break; |
| 3337 case MethodRecognizer::kFloat32x4Splat: | 3086 case MethodRecognizer::kFloat32x4Splat: |
| 3338 *last = new(Z) Float32x4SplatInstr(new(Z) Value(call->ArgumentAt(1)), | 3087 *last = new (Z) Float32x4SplatInstr(new (Z) Value(call->ArgumentAt(1)), |
| 3339 call->deopt_id()); | 3088 call->deopt_id()); |
| 3340 break; | 3089 break; |
| 3341 case MethodRecognizer::kFloat32x4Constructor: | 3090 case MethodRecognizer::kFloat32x4Constructor: |
| 3342 *last = new(Z) Float32x4ConstructorInstr( | 3091 *last = new (Z) Float32x4ConstructorInstr( |
| 3343 new(Z) Value(call->ArgumentAt(1)), | 3092 new (Z) Value(call->ArgumentAt(1)), |
| 3344 new(Z) Value(call->ArgumentAt(2)), | 3093 new (Z) Value(call->ArgumentAt(2)), |
| 3345 new(Z) Value(call->ArgumentAt(3)), | 3094 new (Z) Value(call->ArgumentAt(3)), |
| 3346 new(Z) Value(call->ArgumentAt(4)), | 3095 new (Z) Value(call->ArgumentAt(4)), call->deopt_id()); |
| 3347 call->deopt_id()); | |
| 3348 break; | 3096 break; |
| 3349 case MethodRecognizer::kFloat32x4FromInt32x4Bits: | 3097 case MethodRecognizer::kFloat32x4FromInt32x4Bits: |
| 3350 *last = new(Z) Int32x4ToFloat32x4Instr(new(Z) Value(call->ArgumentAt(1)), | 3098 *last = new (Z) Int32x4ToFloat32x4Instr( |
| 3351 call->deopt_id()); | 3099 new (Z) Value(call->ArgumentAt(1)), call->deopt_id()); |
| 3352 break; | 3100 break; |
| 3353 case MethodRecognizer::kFloat32x4FromFloat64x2: | 3101 case MethodRecognizer::kFloat32x4FromFloat64x2: |
| 3354 *last = new(Z) Float64x2ToFloat32x4Instr( | 3102 *last = new (Z) Float64x2ToFloat32x4Instr( |
| 3355 new(Z) Value(call->ArgumentAt(1)), | 3103 new (Z) Value(call->ArgumentAt(1)), call->deopt_id()); |
| 3356 call->deopt_id()); | |
| 3357 break; | 3104 break; |
| 3358 case MethodRecognizer::kFloat64x2Zero: | 3105 case MethodRecognizer::kFloat64x2Zero: |
| 3359 *last = new(Z) Float64x2ZeroInstr(); | 3106 *last = new (Z) Float64x2ZeroInstr(); |
| 3360 break; | 3107 break; |
| 3361 case MethodRecognizer::kFloat64x2Splat: | 3108 case MethodRecognizer::kFloat64x2Splat: |
| 3362 *last = new(Z) Float64x2SplatInstr(new(Z) Value(call->ArgumentAt(1)), | 3109 *last = new (Z) Float64x2SplatInstr(new (Z) Value(call->ArgumentAt(1)), |
| 3363 call->deopt_id()); | 3110 call->deopt_id()); |
| 3364 break; | 3111 break; |
| 3365 case MethodRecognizer::kFloat64x2Constructor: | 3112 case MethodRecognizer::kFloat64x2Constructor: |
| 3366 *last = new(Z) Float64x2ConstructorInstr( | 3113 *last = new (Z) Float64x2ConstructorInstr( |
| 3367 new(Z) Value(call->ArgumentAt(1)), | 3114 new (Z) Value(call->ArgumentAt(1)), |
| 3368 new(Z) Value(call->ArgumentAt(2)), | 3115 new (Z) Value(call->ArgumentAt(2)), call->deopt_id()); |
| 3369 call->deopt_id()); | |
| 3370 break; | 3116 break; |
| 3371 case MethodRecognizer::kFloat64x2FromFloat32x4: | 3117 case MethodRecognizer::kFloat64x2FromFloat32x4: |
| 3372 *last = new(Z) Float32x4ToFloat64x2Instr( | 3118 *last = new (Z) Float32x4ToFloat64x2Instr( |
| 3373 new(Z) Value(call->ArgumentAt(1)), | 3119 new (Z) Value(call->ArgumentAt(1)), call->deopt_id()); |
| 3374 call->deopt_id()); | |
| 3375 break; | 3120 break; |
| 3376 case MethodRecognizer::kInt32x4BoolConstructor: | 3121 case MethodRecognizer::kInt32x4BoolConstructor: |
| 3377 *last = new(Z) Int32x4BoolConstructorInstr( | 3122 *last = new (Z) Int32x4BoolConstructorInstr( |
| 3378 new(Z) Value(call->ArgumentAt(1)), | 3123 new (Z) Value(call->ArgumentAt(1)), |
| 3379 new(Z) Value(call->ArgumentAt(2)), | 3124 new (Z) Value(call->ArgumentAt(2)), |
| 3380 new(Z) Value(call->ArgumentAt(3)), | 3125 new (Z) Value(call->ArgumentAt(3)), |
| 3381 new(Z) Value(call->ArgumentAt(4)), | 3126 new (Z) Value(call->ArgumentAt(4)), call->deopt_id()); |
| 3382 call->deopt_id()); | |
| 3383 break; | 3127 break; |
| 3384 case MethodRecognizer::kInt32x4Constructor: | 3128 case MethodRecognizer::kInt32x4Constructor: |
| 3385 *last = new(Z) Int32x4ConstructorInstr( | 3129 *last = new (Z) Int32x4ConstructorInstr( |
| 3386 new(Z) Value(call->ArgumentAt(1)), | 3130 new (Z) Value(call->ArgumentAt(1)), |
| 3387 new(Z) Value(call->ArgumentAt(2)), | 3131 new (Z) Value(call->ArgumentAt(2)), |
| 3388 new(Z) Value(call->ArgumentAt(3)), | 3132 new (Z) Value(call->ArgumentAt(3)), |
| 3389 new(Z) Value(call->ArgumentAt(4)), | 3133 new (Z) Value(call->ArgumentAt(4)), call->deopt_id()); |
| 3390 call->deopt_id()); | |
| 3391 break; | 3134 break; |
| 3392 case MethodRecognizer::kInt32x4FromFloat32x4Bits: | 3135 case MethodRecognizer::kInt32x4FromFloat32x4Bits: |
| 3393 *last = new(Z) Float32x4ToInt32x4Instr(new(Z) Value(call->ArgumentAt(1)), | 3136 *last = new (Z) Float32x4ToInt32x4Instr( |
| 3394 call->deopt_id()); | 3137 new (Z) Value(call->ArgumentAt(1)), call->deopt_id()); |
| 3395 break; | 3138 break; |
| 3396 default: | 3139 default: |
| 3397 UNREACHABLE(); | 3140 UNREACHABLE(); |
| 3398 return false; | 3141 return false; |
| 3399 } | 3142 } |
| 3400 flow_graph->AppendTo(cursor, *last, | 3143 flow_graph->AppendTo( |
| 3401 call->deopt_id() != Thread::kNoDeoptId ? | 3144 cursor, *last, |
| 3402 call->env() : NULL, | 3145 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3403 FlowGraph::kValue); | 3146 FlowGraph::kValue); |
| 3404 return true; | 3147 return true; |
| 3405 } | 3148 } |
| 3406 | 3149 |
| 3407 | 3150 |
| 3408 static bool InlineMathCFunction(FlowGraph* flow_graph, | 3151 static bool InlineMathCFunction(FlowGraph* flow_graph, |
| 3409 Instruction* call, | 3152 Instruction* call, |
| 3410 MethodRecognizer::Kind kind, | 3153 MethodRecognizer::Kind kind, |
| 3411 TargetEntryInstr** entry, | 3154 TargetEntryInstr** entry, |
| 3412 Definition** last) { | 3155 Definition** last) { |
| 3413 if (!CanUnboxDouble()) { | 3156 if (!CanUnboxDouble()) { |
| 3414 return false; | 3157 return false; |
| 3415 } | 3158 } |
| 3416 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3159 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3417 call->GetBlock()->try_index()); | 3160 call->GetBlock()->try_index()); |
| 3418 (*entry)->InheritDeoptTarget(Z, call); | 3161 (*entry)->InheritDeoptTarget(Z, call); |
| 3419 Instruction* cursor = *entry; | 3162 Instruction* cursor = *entry; |
| 3420 | 3163 |
| 3421 switch (kind) { | 3164 switch (kind) { |
| 3422 case MethodRecognizer::kMathSqrt: { | 3165 case MethodRecognizer::kMathSqrt: { |
| 3423 *last = new(Z) MathUnaryInstr(MathUnaryInstr::kSqrt, | 3166 *last = new (Z) |
| 3424 new(Z) Value(call->ArgumentAt(0)), | 3167 MathUnaryInstr(MathUnaryInstr::kSqrt, |
| 3425 call->deopt_id()); | 3168 new (Z) Value(call->ArgumentAt(0)), call->deopt_id()); |
| 3426 break; | 3169 break; |
| 3427 } | 3170 } |
| 3428 default: { | 3171 default: { |
| 3429 ZoneGrowableArray<Value*>* args = | 3172 ZoneGrowableArray<Value*>* args = |
| 3430 new(Z) ZoneGrowableArray<Value*>(call->ArgumentCount()); | 3173 new (Z) ZoneGrowableArray<Value*>(call->ArgumentCount()); |
| 3431 for (intptr_t i = 0; i < call->ArgumentCount(); i++) { | 3174 for (intptr_t i = 0; i < call->ArgumentCount(); i++) { |
| 3432 args->Add(new(Z) Value(call->ArgumentAt(i))); | 3175 args->Add(new (Z) Value(call->ArgumentAt(i))); |
| 3433 } | 3176 } |
| 3434 *last = new(Z) InvokeMathCFunctionInstr(args, | 3177 *last = new (Z) InvokeMathCFunctionInstr(args, call->deopt_id(), kind, |
| 3435 call->deopt_id(), | 3178 call->token_pos()); |
| 3436 kind, | |
| 3437 call->token_pos()); | |
| 3438 break; | 3179 break; |
| 3439 } | 3180 } |
| 3440 } | 3181 } |
| 3441 flow_graph->AppendTo(cursor, *last, | 3182 flow_graph->AppendTo( |
| 3442 call->deopt_id() != Thread::kNoDeoptId ? | 3183 cursor, *last, |
| 3443 call->env() : NULL, | 3184 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3444 FlowGraph::kValue); | 3185 FlowGraph::kValue); |
| 3445 return true; | 3186 return true; |
| 3446 } | 3187 } |
| 3447 | 3188 |
| 3448 | 3189 |
| 3449 bool FlowGraphInliner::TryInlineRecognizedMethod(FlowGraph* flow_graph, | 3190 bool FlowGraphInliner::TryInlineRecognizedMethod(FlowGraph* flow_graph, |
| 3450 intptr_t receiver_cid, | 3191 intptr_t receiver_cid, |
| 3451 const Function& target, | 3192 const Function& target, |
| 3452 Definition* call, | 3193 Definition* call, |
| 3453 Definition* receiver, | 3194 Definition* receiver, |
| 3454 TokenPosition token_pos, | 3195 TokenPosition token_pos, |
| (...skipping 89 matching lines...) |
| 3544 token_pos, value_check, entry, last); | 3285 token_pos, value_check, entry, last); |
| 3545 case MethodRecognizer::kFloat64x2ArraySetIndexed: | 3286 case MethodRecognizer::kFloat64x2ArraySetIndexed: |
| 3546 if (!ShouldInlineSimd()) { | 3287 if (!ShouldInlineSimd()) { |
| 3547 return false; | 3288 return false; |
| 3548 } | 3289 } |
| 3549 value_check = ic_data.AsUnaryClassChecksForCid(kFloat64x2Cid, target); | 3290 value_check = ic_data.AsUnaryClassChecksForCid(kFloat64x2Cid, target); |
| 3550 return InlineSetIndexed(flow_graph, kind, target, call, receiver, | 3291 return InlineSetIndexed(flow_graph, kind, target, call, receiver, |
| 3551 token_pos, value_check, entry, last); | 3292 token_pos, value_check, entry, last); |
| 3552 case MethodRecognizer::kByteArrayBaseGetInt8: | 3293 case MethodRecognizer::kByteArrayBaseGetInt8: |
| 3553 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3294 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3554 kTypedDataInt8ArrayCid, | 3295 kTypedDataInt8ArrayCid, entry, last); |
| 3555 entry, last); | |
| 3556 case MethodRecognizer::kByteArrayBaseGetUint8: | 3296 case MethodRecognizer::kByteArrayBaseGetUint8: |
| 3557 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3297 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3558 kTypedDataUint8ArrayCid, | 3298 kTypedDataUint8ArrayCid, entry, last); |
| 3559 entry, last); | |
| 3560 case MethodRecognizer::kByteArrayBaseGetInt16: | 3299 case MethodRecognizer::kByteArrayBaseGetInt16: |
| 3561 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3300 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3562 kTypedDataInt16ArrayCid, | 3301 kTypedDataInt16ArrayCid, entry, last); |
| 3563 entry, last); | |
| 3564 case MethodRecognizer::kByteArrayBaseGetUint16: | 3302 case MethodRecognizer::kByteArrayBaseGetUint16: |
| 3565 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3303 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3566 kTypedDataUint16ArrayCid, | 3304 kTypedDataUint16ArrayCid, entry, last); |
| 3567 entry, last); | |
| 3568 case MethodRecognizer::kByteArrayBaseGetInt32: | 3305 case MethodRecognizer::kByteArrayBaseGetInt32: |
| 3569 if (!CanUnboxInt32()) { | 3306 if (!CanUnboxInt32()) { |
| 3570 return false; | 3307 return false; |
| 3571 } | 3308 } |
| 3572 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3309 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3573 kTypedDataInt32ArrayCid, | 3310 kTypedDataInt32ArrayCid, entry, last); |
| 3574 entry, last); | |
| 3575 case MethodRecognizer::kByteArrayBaseGetUint32: | 3311 case MethodRecognizer::kByteArrayBaseGetUint32: |
| 3576 if (!CanUnboxInt32()) { | 3312 if (!CanUnboxInt32()) { |
| 3577 return false; | 3313 return false; |
| 3578 } | 3314 } |
| 3579 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3315 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3580 kTypedDataUint32ArrayCid, | 3316 kTypedDataUint32ArrayCid, entry, last); |
| 3581 entry, last); | |
| 3582 case MethodRecognizer::kByteArrayBaseGetFloat32: | 3317 case MethodRecognizer::kByteArrayBaseGetFloat32: |
| 3583 if (!CanUnboxDouble()) { | 3318 if (!CanUnboxDouble()) { |
| 3584 return false; | 3319 return false; |
| 3585 } | 3320 } |
| 3586 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3321 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3587 kTypedDataFloat32ArrayCid, | 3322 kTypedDataFloat32ArrayCid, entry, last); |
| 3588 entry, last); | |
| 3589 case MethodRecognizer::kByteArrayBaseGetFloat64: | 3323 case MethodRecognizer::kByteArrayBaseGetFloat64: |
| 3590 if (!CanUnboxDouble()) { | 3324 if (!CanUnboxDouble()) { |
| 3591 return false; | 3325 return false; |
| 3592 } | 3326 } |
| 3593 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3327 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3594 kTypedDataFloat64ArrayCid, | 3328 kTypedDataFloat64ArrayCid, entry, last); |
| 3595 entry, last); | |
| 3596 case MethodRecognizer::kByteArrayBaseGetFloat32x4: | 3329 case MethodRecognizer::kByteArrayBaseGetFloat32x4: |
| 3597 if (!ShouldInlineSimd()) { | 3330 if (!ShouldInlineSimd()) { |
| 3598 return false; | 3331 return false; |
| 3599 } | 3332 } |
| 3600 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3333 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3601 kTypedDataFloat32x4ArrayCid, | 3334 kTypedDataFloat32x4ArrayCid, entry, last); |
| 3602 entry, last); | |
| 3603 case MethodRecognizer::kByteArrayBaseGetInt32x4: | 3335 case MethodRecognizer::kByteArrayBaseGetInt32x4: |
| 3604 if (!ShouldInlineSimd()) { | 3336 if (!ShouldInlineSimd()) { |
| 3605 return false; | 3337 return false; |
| 3606 } | 3338 } |
| 3607 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, | 3339 return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid, |
| 3608 kTypedDataInt32x4ArrayCid, | 3340 kTypedDataInt32x4ArrayCid, entry, last); |
| 3609 entry, last); | |
| 3610 case MethodRecognizer::kByteArrayBaseSetInt8: | 3341 case MethodRecognizer::kByteArrayBaseSetInt8: |
| 3611 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3342 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3612 receiver_cid, kTypedDataInt8ArrayCid, | 3343 receiver_cid, kTypedDataInt8ArrayCid, |
| 3613 entry, last); | 3344 entry, last); |
| 3614 case MethodRecognizer::kByteArrayBaseSetUint8: | 3345 case MethodRecognizer::kByteArrayBaseSetUint8: |
| 3615 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3346 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3616 receiver_cid, | 3347 receiver_cid, kTypedDataUint8ArrayCid, |
| 3617 kTypedDataUint8ArrayCid, | |
| 3618 entry, last); | 3348 entry, last); |
| 3619 case MethodRecognizer::kByteArrayBaseSetInt16: | 3349 case MethodRecognizer::kByteArrayBaseSetInt16: |
| 3620 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3350 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3621 receiver_cid, | 3351 receiver_cid, kTypedDataInt16ArrayCid, |
| 3622 kTypedDataInt16ArrayCid, | |
| 3623 entry, last); | 3352 entry, last); |
| 3624 case MethodRecognizer::kByteArrayBaseSetUint16: | 3353 case MethodRecognizer::kByteArrayBaseSetUint16: |
| 3625 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3354 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3626 receiver_cid, | 3355 receiver_cid, kTypedDataUint16ArrayCid, |
| 3627 kTypedDataUint16ArrayCid, | |
| 3628 entry, last); | 3356 entry, last); |
| 3629 case MethodRecognizer::kByteArrayBaseSetInt32: | 3357 case MethodRecognizer::kByteArrayBaseSetInt32: |
| 3630 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3358 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3631 receiver_cid, | 3359 receiver_cid, kTypedDataInt32ArrayCid, |
| 3632 kTypedDataInt32ArrayCid, | |
| 3633 entry, last); | 3360 entry, last); |
| 3634 case MethodRecognizer::kByteArrayBaseSetUint32: | 3361 case MethodRecognizer::kByteArrayBaseSetUint32: |
| 3635 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3362 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3636 receiver_cid, | 3363 receiver_cid, kTypedDataUint32ArrayCid, |
| 3637 kTypedDataUint32ArrayCid, | |
| 3638 entry, last); | 3364 entry, last); |
| 3639 case MethodRecognizer::kByteArrayBaseSetFloat32: | 3365 case MethodRecognizer::kByteArrayBaseSetFloat32: |
| 3640 if (!CanUnboxDouble()) { | 3366 if (!CanUnboxDouble()) { |
| 3641 return false; | 3367 return false; |
| 3642 } | 3368 } |
| 3643 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3369 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3644 receiver_cid, | 3370 receiver_cid, kTypedDataFloat32ArrayCid, |
| 3645 kTypedDataFloat32ArrayCid, | |
| 3646 entry, last); | 3371 entry, last); |
| 3647 case MethodRecognizer::kByteArrayBaseSetFloat64: | 3372 case MethodRecognizer::kByteArrayBaseSetFloat64: |
| 3648 if (!CanUnboxDouble()) { | 3373 if (!CanUnboxDouble()) { |
| 3649 return false; | 3374 return false; |
| 3650 } | 3375 } |
| 3651 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3376 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3652 receiver_cid, | 3377 receiver_cid, kTypedDataFloat64ArrayCid, |
| 3653 kTypedDataFloat64ArrayCid, | |
| 3654 entry, last); | 3378 entry, last); |
| 3655 case MethodRecognizer::kByteArrayBaseSetFloat32x4: | 3379 case MethodRecognizer::kByteArrayBaseSetFloat32x4: |
| 3656 if (!ShouldInlineSimd()) { | 3380 if (!ShouldInlineSimd()) { |
| 3657 return false; | 3381 return false; |
| 3658 } | 3382 } |
| 3659 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3383 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3660 receiver_cid, | 3384 receiver_cid, kTypedDataFloat32x4ArrayCid, |
| 3661 kTypedDataFloat32x4ArrayCid, | |
| 3662 entry, last); | 3385 entry, last); |
| 3663 case MethodRecognizer::kByteArrayBaseSetInt32x4: | 3386 case MethodRecognizer::kByteArrayBaseSetInt32x4: |
| 3664 if (!ShouldInlineSimd()) { | 3387 if (!ShouldInlineSimd()) { |
| 3665 return false; | 3388 return false; |
| 3666 } | 3389 } |
| 3667 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, | 3390 return InlineByteArrayBaseStore(flow_graph, target, call, receiver, |
| 3668 receiver_cid, | 3391 receiver_cid, kTypedDataInt32x4ArrayCid, |
| 3669 kTypedDataInt32x4ArrayCid, | |
| 3670 entry, last); | 3392 entry, last); |
| 3671 case MethodRecognizer::kOneByteStringCodeUnitAt: | 3393 case MethodRecognizer::kOneByteStringCodeUnitAt: |
| 3672 case MethodRecognizer::kTwoByteStringCodeUnitAt: | 3394 case MethodRecognizer::kTwoByteStringCodeUnitAt: |
| 3673 case MethodRecognizer::kExternalOneByteStringCodeUnitAt: | 3395 case MethodRecognizer::kExternalOneByteStringCodeUnitAt: |
| 3674 case MethodRecognizer::kExternalTwoByteStringCodeUnitAt: | 3396 case MethodRecognizer::kExternalTwoByteStringCodeUnitAt: |
| 3675 return InlineStringCodeUnitAt( | 3397 return InlineStringCodeUnitAt(flow_graph, call, receiver_cid, entry, |
| 3676 flow_graph, call, receiver_cid, entry, last); | 3398 last); |
| 3677 case MethodRecognizer::kStringBaseCharAt: | 3399 case MethodRecognizer::kStringBaseCharAt: |
| 3678 return InlineStringBaseCharAt( | 3400 return InlineStringBaseCharAt(flow_graph, call, receiver_cid, entry, |
| 3679 flow_graph, call, receiver_cid, entry, last); | 3401 last); |
| 3680 case MethodRecognizer::kDoubleAdd: | 3402 case MethodRecognizer::kDoubleAdd: |
| 3681 return InlineDoubleOp(flow_graph, Token::kADD, call, entry, last); | 3403 return InlineDoubleOp(flow_graph, Token::kADD, call, entry, last); |
| 3682 case MethodRecognizer::kDoubleSub: | 3404 case MethodRecognizer::kDoubleSub: |
| 3683 return InlineDoubleOp(flow_graph, Token::kSUB, call, entry, last); | 3405 return InlineDoubleOp(flow_graph, Token::kSUB, call, entry, last); |
| 3684 case MethodRecognizer::kDoubleMul: | 3406 case MethodRecognizer::kDoubleMul: |
| 3685 return InlineDoubleOp(flow_graph, Token::kMUL, call, entry, last); | 3407 return InlineDoubleOp(flow_graph, Token::kMUL, call, entry, last); |
| 3686 case MethodRecognizer::kDoubleDiv: | 3408 case MethodRecognizer::kDoubleDiv: |
| 3687 return InlineDoubleOp(flow_graph, Token::kDIV, call, entry, last); | 3409 return InlineDoubleOp(flow_graph, Token::kDIV, call, entry, last); |
| 3688 case MethodRecognizer::kDouble_getIsNaN: | 3410 case MethodRecognizer::kDouble_getIsNaN: |
| 3689 case MethodRecognizer::kDouble_getIsInfinite: | 3411 case MethodRecognizer::kDouble_getIsInfinite: |
| 3690 return InlineDoubleTestOp(flow_graph, call, kind, entry, last); | 3412 return InlineDoubleTestOp(flow_graph, call, kind, entry, last); |
| 3691 case MethodRecognizer::kGrowableArraySetData: | 3413 case MethodRecognizer::kGrowableArraySetData: |
| 3692 ASSERT(receiver_cid == kGrowableObjectArrayCid); | 3414 ASSERT(receiver_cid == kGrowableObjectArrayCid); |
| 3693 ASSERT(ic_data.NumberOfChecks() == 1); | 3415 ASSERT(ic_data.NumberOfChecks() == 1); |
| 3694 return InlineGrowableArraySetter( | 3416 return InlineGrowableArraySetter(flow_graph, |
| 3695 flow_graph, GrowableObjectArray::data_offset(), kEmitStoreBarrier, | 3417 GrowableObjectArray::data_offset(), |
| 3696 call, entry, last); | 3418 kEmitStoreBarrier, call, entry, last); |
| 3697 case MethodRecognizer::kGrowableArraySetLength: | 3419 case MethodRecognizer::kGrowableArraySetLength: |
| 3698 ASSERT(receiver_cid == kGrowableObjectArrayCid); | 3420 ASSERT(receiver_cid == kGrowableObjectArrayCid); |
| 3699 ASSERT(ic_data.NumberOfChecks() == 1); | 3421 ASSERT(ic_data.NumberOfChecks() == 1); |
| 3700 return InlineGrowableArraySetter( | 3422 return InlineGrowableArraySetter(flow_graph, |
| 3701 flow_graph, GrowableObjectArray::length_offset(), kNoStoreBarrier, | 3423 GrowableObjectArray::length_offset(), |
| 3702 call, entry, last); | 3424 kNoStoreBarrier, call, entry, last); |
| 3703 case MethodRecognizer::kSmi_bitAndFromSmi: | 3425 case MethodRecognizer::kSmi_bitAndFromSmi: |
| 3704 return InlineSmiBitAndFromSmi(flow_graph, call, entry, last); | 3426 return InlineSmiBitAndFromSmi(flow_graph, call, entry, last); |
| 3705 | 3427 |
| 3706 case MethodRecognizer::kFloat32x4ShuffleX: | 3428 case MethodRecognizer::kFloat32x4ShuffleX: |
| 3707 case MethodRecognizer::kFloat32x4ShuffleY: | 3429 case MethodRecognizer::kFloat32x4ShuffleY: |
| 3708 case MethodRecognizer::kFloat32x4ShuffleZ: | 3430 case MethodRecognizer::kFloat32x4ShuffleZ: |
| 3709 case MethodRecognizer::kFloat32x4ShuffleW: | 3431 case MethodRecognizer::kFloat32x4ShuffleW: |
| 3710 case MethodRecognizer::kFloat32x4GetSignMask: | 3432 case MethodRecognizer::kFloat32x4GetSignMask: |
| 3711 case MethodRecognizer::kFloat32x4Equal: | 3433 case MethodRecognizer::kFloat32x4Equal: |
| 3712 case MethodRecognizer::kFloat32x4GreaterThan: | 3434 case MethodRecognizer::kFloat32x4GreaterThan: |
| (...skipping 67 matching lines...) |
| 3780 case MethodRecognizer::kMathSin: | 3502 case MethodRecognizer::kMathSin: |
| 3781 case MethodRecognizer::kMathCos: | 3503 case MethodRecognizer::kMathCos: |
| 3782 case MethodRecognizer::kMathTan: | 3504 case MethodRecognizer::kMathTan: |
| 3783 case MethodRecognizer::kMathAsin: | 3505 case MethodRecognizer::kMathAsin: |
| 3784 case MethodRecognizer::kMathAcos: | 3506 case MethodRecognizer::kMathAcos: |
| 3785 case MethodRecognizer::kMathAtan: | 3507 case MethodRecognizer::kMathAtan: |
| 3786 case MethodRecognizer::kMathAtan2: | 3508 case MethodRecognizer::kMathAtan2: |
| 3787 return InlineMathCFunction(flow_graph, call, kind, entry, last); | 3509 return InlineMathCFunction(flow_graph, call, kind, entry, last); |
| 3788 | 3510 |
| 3789 case MethodRecognizer::kObjectConstructor: { | 3511 case MethodRecognizer::kObjectConstructor: { |
| 3790 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3512 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3791 call->GetBlock()->try_index()); | 3513 call->GetBlock()->try_index()); |
| 3792 (*entry)->InheritDeoptTarget(Z, call); | 3514 (*entry)->InheritDeoptTarget(Z, call); |
| 3793 ASSERT(!call->HasUses()); | 3515 ASSERT(!call->HasUses()); |
| 3794 *last = NULL; // Empty body. | 3516 *last = NULL; // Empty body. |
| 3795 return true; | 3517 return true; |
| 3796 } | 3518 } |
| 3797 | 3519 |
| 3798 case MethodRecognizer::kObjectArrayAllocate: { | 3520 case MethodRecognizer::kObjectArrayAllocate: { |
| 3799 Value* num_elements = new(Z) Value(call->ArgumentAt(1)); | 3521 Value* num_elements = new (Z) Value(call->ArgumentAt(1)); |
| 3800 if (num_elements->BindsToConstant() && | 3522 if (num_elements->BindsToConstant() && |
| 3801 num_elements->BoundConstant().IsSmi()) { | 3523 num_elements->BoundConstant().IsSmi()) { |
| 3802 intptr_t length = | 3524 intptr_t length = Smi::Cast(num_elements->BoundConstant()).Value(); |
| 3803 Smi::Cast(num_elements->BoundConstant()).Value(); | |
| 3804 if (length >= 0 && length <= Array::kMaxElements) { | 3525 if (length >= 0 && length <= Array::kMaxElements) { |
| 3805 Value* type = new(Z) Value(call->ArgumentAt(0)); | 3526 Value* type = new (Z) Value(call->ArgumentAt(0)); |
| 3806 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3527 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3807 call->GetBlock()->try_index()); | 3528 call->GetBlock()->try_index()); |
| 3808 (*entry)->InheritDeoptTarget(Z, call); | 3529 (*entry)->InheritDeoptTarget(Z, call); |
| 3809 *last = | 3530 *last = |
| 3810 new(Z) CreateArrayInstr(call->token_pos(), type, num_elements); | 3531 new (Z) CreateArrayInstr(call->token_pos(), type, num_elements); |
| 3811 flow_graph->AppendTo(*entry, *last, | 3532 flow_graph->AppendTo( |
| 3812 call->deopt_id() != Thread::kNoDeoptId ? | 3533 *entry, *last, |
| 3813 call->env() : NULL, | 3534 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3814 FlowGraph::kValue); | 3535 FlowGraph::kValue); |
| 3815 return true; | 3536 return true; |
| 3816 } | 3537 } |
| 3817 } | 3538 } |
| 3818 return false; | 3539 return false; |
| 3819 } | 3540 } |
| 3820 | 3541 |
| 3821 case MethodRecognizer::kObjectRuntimeType: { | 3542 case MethodRecognizer::kObjectRuntimeType: { |
| 3822 Type& type = Type::ZoneHandle(Z); | 3543 Type& type = Type::ZoneHandle(Z); |
| 3823 if (RawObject::IsStringClassId(receiver_cid)) { | 3544 if (RawObject::IsStringClassId(receiver_cid)) { |
| 3824 type = Type::StringType(); | 3545 type = Type::StringType(); |
| 3825 } else if (receiver_cid == kDoubleCid) { | 3546 } else if (receiver_cid == kDoubleCid) { |
| 3826 type = Type::Double(); | 3547 type = Type::Double(); |
| 3827 } else if (RawObject::IsIntegerClassId(receiver_cid)) { | 3548 } else if (RawObject::IsIntegerClassId(receiver_cid)) { |
| 3828 type = Type::IntType(); | 3549 type = Type::IntType(); |
| 3829 } else if (receiver_cid != kClosureCid) { | 3550 } else if (receiver_cid != kClosureCid) { |
| 3830 const Class& cls = Class::Handle(Z, | 3551 const Class& cls = Class::Handle( |
| 3831 flow_graph->isolate()->class_table()->At(receiver_cid)); | 3552 Z, flow_graph->isolate()->class_table()->At(receiver_cid)); |
| 3832 if (!cls.IsGeneric()) { | 3553 if (!cls.IsGeneric()) { |
| 3833 type = cls.CanonicalType(); | 3554 type = cls.CanonicalType(); |
| 3834 } | 3555 } |
| 3835 } | 3556 } |
| 3836 | 3557 |
| 3837 if (!type.IsNull()) { | 3558 if (!type.IsNull()) { |
| 3838 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3559 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3839 call->GetBlock()->try_index()); | 3560 call->GetBlock()->try_index()); |
| 3840 (*entry)->InheritDeoptTarget(Z, call); | 3561 (*entry)->InheritDeoptTarget(Z, call); |
| 3841 *last = new(Z) ConstantInstr(type); | 3562 *last = new (Z) ConstantInstr(type); |
| 3842 flow_graph->AppendTo(*entry, *last, | 3563 flow_graph->AppendTo( |
| 3843 call->deopt_id() != Thread::kNoDeoptId ? | 3564 *entry, *last, |
| 3844 call->env() : NULL, | 3565 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3845 FlowGraph::kValue); | 3566 FlowGraph::kValue); |
| 3846 return true; | 3567 return true; |
| 3847 } | 3568 } |
| 3848 return false; | 3569 return false; |
| 3849 } | 3570 } |
| 3850 | 3571 |
| 3851 case MethodRecognizer::kOneByteStringSetAt: { | 3572 case MethodRecognizer::kOneByteStringSetAt: { |
| 3852 // This is an internal method, no need to check argument types nor | 3573 // This is an internal method, no need to check argument types nor |
| 3853 // range. | 3574 // range. |
| 3854 *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(), | 3575 *entry = new (Z) TargetEntryInstr(flow_graph->allocate_block_id(), |
| 3855 call->GetBlock()->try_index()); | 3576 call->GetBlock()->try_index()); |
| 3856 (*entry)->InheritDeoptTarget(Z, call); | 3577 (*entry)->InheritDeoptTarget(Z, call); |
| 3857 Definition* str = call->ArgumentAt(0); | 3578 Definition* str = call->ArgumentAt(0); |
| 3858 Definition* index = call->ArgumentAt(1); | 3579 Definition* index = call->ArgumentAt(1); |
| 3859 Definition* value = call->ArgumentAt(2); | 3580 Definition* value = call->ArgumentAt(2); |
| 3860 *last = new(Z) StoreIndexedInstr( | 3581 *last = |
| 3861 new(Z) Value(str), | 3582 new (Z) StoreIndexedInstr(new (Z) Value(str), new (Z) Value(index), |
| 3862 new(Z) Value(index), | 3583 new (Z) Value(value), kNoStoreBarrier, |
| 3863 new(Z) Value(value), | 3584 1, // Index scale |
| 3864 kNoStoreBarrier, | 3585 kOneByteStringCid, kAlignedAccess, |
| 3865 1, // Index scale | 3586 call->deopt_id(), call->token_pos()); |
| 3866 kOneByteStringCid, | 3587 flow_graph->AppendTo( |
| 3867 kAlignedAccess, | 3588 *entry, *last, |
| 3868 call->deopt_id(), | 3589 call->deopt_id() != Thread::kNoDeoptId ? call->env() : NULL, |
| 3869 call->token_pos()); | 3590 FlowGraph::kEffect); |
| 3870 flow_graph->AppendTo(*entry, | |
| 3871 *last, | |
| 3872 call->deopt_id() != Thread::kNoDeoptId ? | |
| 3873 call->env() : NULL, | |
| 3874 FlowGraph::kEffect); | |
| 3875 return true; | 3591 return true; |
| 3876 } | 3592 } |
| 3877 | 3593 |
| 3878 default: | 3594 default: |
| 3879 return false; | 3595 return false; |
| 3880 } | 3596 } |
| 3881 } | 3597 } |
| 3882 | 3598 |
| 3883 | 3599 |
| 3884 } // namespace dart | 3600 } // namespace dart |
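
Note on the hunks above: aside from the clang-format line rewrapping, every recognized-method case follows the same inlining protocol. The recognizer allocates a fresh `TargetEntryInstr` for the inlined body, calls `InheritDeoptTarget` with the original call, builds the replacement instruction(s) as `*last`, and appends them to the flow graph with the call's environment when a deopt id is present. The sketch below is a minimal, self-contained illustration of that entry/last/append shape; `MiniGraph`, `MiniBlock`, and `MiniInstr` are hypothetical stand-ins, not the VM's real `FlowGraph`, `TargetEntryInstr`, or `Instruction` classes.

```cpp
// Minimal sketch (NOT Dart VM code) of the entry/last/append pattern used by
// the recognized-method inliners above.  All types here are hypothetical
// stand-ins for FlowGraph, TargetEntryInstr and Instruction.
#include <cstdio>
#include <memory>
#include <vector>

struct MiniInstr {
  const char* name;
  explicit MiniInstr(const char* n) : name(n) {}
};

struct MiniBlock {
  int block_id;
  int deopt_id = -1;  // -1 means no deopt target inherited yet.
  std::vector<std::unique_ptr<MiniInstr>> body;
  explicit MiniBlock(int id) : block_id(id) {}
};

struct MiniGraph {
  int next_block_id = 0;
  std::vector<std::unique_ptr<MiniBlock>> blocks;

  // Analogue of flow_graph->allocate_block_id() plus block creation.
  MiniBlock* AllocateBlock() {
    blocks.push_back(std::make_unique<MiniBlock>(next_block_id++));
    return blocks.back().get();
  }
};

// Mirrors the shape of the InlineXxx(flow_graph, call, ..., entry, last)
// helpers: on success it fills *entry and *last and returns true.
bool InlineRecognizedStore(MiniGraph* graph, int call_deopt_id,
                           MiniBlock** entry, MiniInstr** last) {
  *entry = graph->AllocateBlock();
  (*entry)->deopt_id = call_deopt_id;  // "InheritDeoptTarget(Z, call)"
  auto store = std::make_unique<MiniInstr>("StoreIndexed");
  *last = store.get();
  (*entry)->body.push_back(std::move(store));  // "flow_graph->AppendTo(...)"
  return true;
}

int main() {
  MiniGraph graph;
  MiniBlock* entry = nullptr;
  MiniInstr* last = nullptr;
  if (InlineRecognizedStore(&graph, /*call_deopt_id=*/42, &entry, &last)) {
    std::printf("inlined body: block %d, deopt %d, last = %s\n",
                entry->block_id, entry->deopt_id, last->name);
  }
  return 0;
}
```

Returning the inlined body through the `entry`/`last` out-parameters, as `InlineByteArrayBaseStore` and the other helpers in this file do, lets the caller splice the fragment into the graph in place of the original call without the helper needing to know the surrounding control flow.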