| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 5 #ifndef RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |
| 6 #define RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 6 #define RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |
| 7 | 7 |
| 8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
| 9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
| 10 #include "vm/code_descriptors.h" | 10 #include "vm/code_descriptors.h" |
| 11 #include "vm/code_generator.h" | 11 #include "vm/code_generator.h" |
| 12 #include "vm/intermediate_language.h" | 12 #include "vm/intermediate_language.h" |
| 13 | 13 |
| 14 namespace dart { | 14 namespace dart { |
| 15 | 15 |
| 16 // Forward declarations. | 16 // Forward declarations. |
| 17 class Code; | 17 class Code; |
| 18 class DeoptInfoBuilder; | 18 class DeoptInfoBuilder; |
| 19 class FlowGraph; | 19 class FlowGraph; |
| 20 class FlowGraphCompiler; | 20 class FlowGraphCompiler; |
| 21 class Function; | 21 class Function; |
| 22 template <typename T> class GrowableArray; | 22 template <typename T> |
| 23 class GrowableArray; |
| 23 class ParsedFunction; | 24 class ParsedFunction; |
| 24 | 25 |
| 25 | 26 |
| 26 class ParallelMoveResolver : public ValueObject { | 27 class ParallelMoveResolver : public ValueObject { |
| 27 public: | 28 public: |
| 28 explicit ParallelMoveResolver(FlowGraphCompiler* compiler); | 29 explicit ParallelMoveResolver(FlowGraphCompiler* compiler); |
| 29 | 30 |
| 30 // Resolve a set of parallel moves, emitting assembler instructions. | 31 // Resolve a set of parallel moves, emitting assembler instructions. |
| 31 void EmitNativeCode(ParallelMoveInstr* parallel_move); | 32 void EmitNativeCode(ParallelMoveInstr* parallel_move); |
| 32 | 33 |
| (...skipping 84 matching lines...) |
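
For context on the problem `EmitNativeCode` solves: a parallel move must behave as if all moves happen at once, so the resolver orders them to avoid clobbering pending sources and breaks cycles (such as a register swap) with a temporary. Below is a minimal self-contained sketch of that standard scheduling algorithm in plain C++; the types and the printed "instructions" are illustrative assumptions, not the VM's actual resolver.

```cpp
#include <cstdio>
#include <vector>

// Locations are plain ints here; -1 stands for the scratch temporary.
struct Move { int src; int dst; };

static void PrintLoc(int loc) {
  if (loc == -1) std::printf("temp"); else std::printf("r%d", loc);
}

// Emit moves whose destination is not read by any pending move; when every
// move is blocked we are in a cycle, so spill one source to the temporary.
void ResolveParallelMoves(std::vector<Move> moves) {
  while (!moves.empty()) {
    bool progress = false;
    for (size_t i = 0; i < moves.size(); ++i) {
      bool blocked = false;
      for (size_t j = 0; j < moves.size(); ++j) {
        if (j != i && moves[j].src == moves[i].dst) { blocked = true; break; }
      }
      if (!blocked) {  // Safe: nothing still reads this destination.
        std::printf("mov "); PrintLoc(moves[i].src);
        std::printf(" -> "); PrintLoc(moves[i].dst); std::printf("\n");
        moves.erase(moves.begin() + i);
        progress = true;
        break;
      }
    }
    if (!progress) {  // Cycle: save one source, redirect its readers to temp.
      int cycled = moves[0].src;
      std::printf("mov "); PrintLoc(cycled); std::printf(" -> temp\n");
      for (auto& m : moves) {
        if (m.src == cycled) m.src = -1;
      }
    }
  }
}

int main() {
  ResolveParallelMoves({{0, 1}, {1, 0}});  // swap r0 and r1 via temp
}
```

The real resolver emits machine instructions and handles several location kinds (registers, stack slots, constants), but the ordering constraint it must satisfy is the same.
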
| 117 ICData::DeoptReasonId reason, | 118 ICData::DeoptReasonId reason, |
| 118 uint32_t flags, | 119 uint32_t flags, |
| 119 Environment* deopt_env) | 120 Environment* deopt_env) |
| 120 : pc_offset_(-1), | 121 : pc_offset_(-1), |
| 121 deopt_id_(deopt_id), | 122 deopt_id_(deopt_id), |
| 122 reason_(reason), | 123 reason_(reason), |
| 123 flags_(flags), | 124 flags_(flags), |
| 124 deopt_env_(deopt_env) { | 125 deopt_env_(deopt_env) { |
| 125 ASSERT(deopt_env != NULL); | 126 ASSERT(deopt_env != NULL); |
| 126 } | 127 } |
| 127 virtual ~CompilerDeoptInfo() { } | 128 virtual ~CompilerDeoptInfo() {} |
| 128 | 129 |
| 129 RawTypedData* CreateDeoptInfo(FlowGraphCompiler* compiler, | 130 RawTypedData* CreateDeoptInfo(FlowGraphCompiler* compiler, |
| 130 DeoptInfoBuilder* builder, | 131 DeoptInfoBuilder* builder, |
| 131 const Array& deopt_table); | 132 const Array& deopt_table); |
| 132 | 133 |
| 133 | 134 |
| 134 // No code needs to be generated. | 135 // No code needs to be generated. |
| 135 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix) {} | 136 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix) {} |
| 136 | 137 |
| 137 intptr_t pc_offset() const { return pc_offset_; } | 138 intptr_t pc_offset() const { return pc_offset_; } |
| (...skipping 30 matching lines...) |
| 168 ASSERT(reason != ICData::kDeoptAtCall); | 169 ASSERT(reason != ICData::kDeoptAtCall); |
| 169 } | 170 } |
| 170 | 171 |
| 171 Label* entry_label() { return &entry_label_; } | 172 Label* entry_label() { return &entry_label_; } |
| 172 | 173 |
| 173 // Implementation is in architecture specific file. | 174 // Implementation is in architecture specific file. |
| 174 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix); | 175 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix); |
| 175 | 176 |
| 176 const char* Name() const { | 177 const char* Name() const { |
| 177 const char* kFormat = "Deopt stub for id %d, reason: %s"; | 178 const char* kFormat = "Deopt stub for id %d, reason: %s"; |
| 178 const intptr_t len = OS::SNPrint(NULL, 0, kFormat, | 179 const intptr_t len = OS::SNPrint(NULL, 0, kFormat, deopt_id(), |
| 179 deopt_id(), DeoptReasonToCString(reason())) + 1; | 180 DeoptReasonToCString(reason())) + |
| 181 1; |
| 180 char* chars = Thread::Current()->zone()->Alloc<char>(len); | 182 char* chars = Thread::Current()->zone()->Alloc<char>(len); |
| 181 OS::SNPrint(chars, len, kFormat, | 183 OS::SNPrint(chars, len, kFormat, deopt_id(), |
| 182 deopt_id(), DeoptReasonToCString(reason())); | 184 DeoptReasonToCString(reason())); |
| 183 return chars; | 185 return chars; |
| 184 } | 186 } |
| 185 | 187 |
| 186 private: | 188 private: |
| 187 Label entry_label_; | 189 Label entry_label_; |
| 188 | 190 |
| 189 DISALLOW_COPY_AND_ASSIGN(CompilerDeoptInfoWithStub); | 191 DISALLOW_COPY_AND_ASSIGN(CompilerDeoptInfoWithStub); |
| 190 }; | 192 }; |
| 191 | 193 |
| 192 | 194 |
| 193 class SlowPathCode : public ZoneAllocated { | 195 class SlowPathCode : public ZoneAllocated { |
| 194 public: | 196 public: |
| 195 SlowPathCode() : entry_label_(), exit_label_() { } | 197 SlowPathCode() : entry_label_(), exit_label_() {} |
| 196 virtual ~SlowPathCode() { } | 198 virtual ~SlowPathCode() {} |
| 197 | 199 |
| 198 Label* entry_label() { return &entry_label_; } | 200 Label* entry_label() { return &entry_label_; } |
| 199 Label* exit_label() { return &exit_label_; } | 201 Label* exit_label() { return &exit_label_; } |
| 200 | 202 |
| 201 void GenerateCode(FlowGraphCompiler* compiler) { | 203 void GenerateCode(FlowGraphCompiler* compiler) { |
| 202 EmitNativeCode(compiler); | 204 EmitNativeCode(compiler); |
| 203 ASSERT(entry_label_.IsBound()); | 205 ASSERT(entry_label_.IsBound()); |
| 204 } | 206 } |
| 205 | 207 |
| 206 private: | 208 private: |
| 207 virtual void EmitNativeCode(FlowGraphCompiler* compiler) = 0; | 209 virtual void EmitNativeCode(FlowGraphCompiler* compiler) = 0; |
| 208 | 210 |
| 209 Label entry_label_; | 211 Label entry_label_; |
| 210 Label exit_label_; | 212 Label exit_label_; |
| 211 | 213 |
| 212 DISALLOW_COPY_AND_ASSIGN(SlowPathCode); | 214 DISALLOW_COPY_AND_ASSIGN(SlowPathCode); |
| 213 }; | 215 }; |
| 214 | 216 |
| 215 | 217 |
| 216 struct CidTarget { | 218 struct CidTarget { |
| 217 intptr_t cid; | 219 intptr_t cid; |
| 218 Function* target; | 220 Function* target; |
| 219 intptr_t count; | 221 intptr_t count; |
| 220 CidTarget(intptr_t cid_arg, | 222 CidTarget(intptr_t cid_arg, Function* target_arg, intptr_t count_arg) |
| 221 Function* target_arg, | |
| 222 intptr_t count_arg) | |
| 223 : cid(cid_arg), target(target_arg), count(count_arg) {} | 223 : cid(cid_arg), target(target_arg), count(count_arg) {} |
| 224 }; | 224 }; |
| 225 | 225 |
| 226 | 226 |
| 227 class FlowGraphCompiler : public ValueObject { | 227 class FlowGraphCompiler : public ValueObject { |
| 228 private: | 228 private: |
| 229 class BlockInfo : public ZoneAllocated { | 229 class BlockInfo : public ZoneAllocated { |
| 230 public: | 230 public: |
| 231 BlockInfo() | 231 BlockInfo() |
| 232 : block_label_(), | 232 : block_label_(), |
| 233 jump_label_(&block_label_), | 233 jump_label_(&block_label_), |
| 234 next_nonempty_label_(NULL), | 234 next_nonempty_label_(NULL), |
| 235 is_marked_(false) {} | 235 is_marked_(false) {} |
| 236 | 236 |
| 237 // The label to jump to when control is transferred to this block. For | 237 // The label to jump to when control is transferred to this block. For |
| 238 // nonempty blocks it is the label of the block itself. For empty | 238 // nonempty blocks it is the label of the block itself. For empty |
| 239 // blocks it is the label of the first nonempty successor block. | 239 // blocks it is the label of the first nonempty successor block. |
| 240 Label* jump_label() const { return jump_label_; } | 240 Label* jump_label() const { return jump_label_; } |
| 241 void set_jump_label(Label* label) { jump_label_ = label; } | 241 void set_jump_label(Label* label) { jump_label_ = label; } |
| 242 | 242 |
| 243 // The label of the first nonempty block after this one in the block | 243 // The label of the first nonempty block after this one in the block |
| 244 // order, or NULL if there is no nonempty block following this one. | 244 // order, or NULL if there is no nonempty block following this one. |
| 245 Label* next_nonempty_label() const { return next_nonempty_label_; } | 245 Label* next_nonempty_label() const { return next_nonempty_label_; } |
| 246 void set_next_nonempty_label(Label* label) { next_nonempty_label_ = label; } | 246 void set_next_nonempty_label(Label* label) { next_nonempty_label_ = label; } |
| 247 | 247 |
| 248 bool WasCompacted() const { | 248 bool WasCompacted() const { return jump_label_ != &block_label_; } |
| 249 return jump_label_ != &block_label_; | |
| 250 } | |
| 251 | 249 |
| 252 // Block compaction is recursive. Block info for already-compacted | 250 // Block compaction is recursive. Block info for already-compacted |
| 253 // blocks is marked so as to avoid cycles in the graph. | 251 // blocks is marked so as to avoid cycles in the graph. |
| 254 bool is_marked() const { return is_marked_; } | 252 bool is_marked() const { return is_marked_; } |
| 255 void mark() { is_marked_ = true; } | 253 void mark() { is_marked_ = true; } |
| 256 | 254 |
| 257 private: | 255 private: |
| 258 Label block_label_; | 256 Label block_label_; |
| 259 | 257 |
| 260 Label* jump_label_; | 258 Label* jump_label_; |
| 261 Label* next_nonempty_label_; | 259 Label* next_nonempty_label_; |
| 262 | 260 |
| 263 bool is_marked_; | 261 bool is_marked_; |
| 264 }; | 262 }; |
| 265 | 263 |
| 266 public: | 264 public: |
| 267 FlowGraphCompiler( | 265 FlowGraphCompiler(Assembler* assembler, |
| 268 Assembler* assembler, | 266 FlowGraph* flow_graph, |
| 269 FlowGraph* flow_graph, | 267 const ParsedFunction& parsed_function, |
| 270 const ParsedFunction& parsed_function, | 268 bool is_optimizing, |
| 271 bool is_optimizing, | 269 const GrowableArray<const Function*>& inline_id_to_function, |
| 272 const GrowableArray<const Function*>& inline_id_to_function, | 270 const GrowableArray<TokenPosition>& inline_id_to_token_pos, |
| 273 const GrowableArray<TokenPosition>& inline_id_to_token_pos, | 271 const GrowableArray<intptr_t>& caller_inline_id); |
| 274 const GrowableArray<intptr_t>& caller_inline_id); | |
| 275 | 272 |
| 276 ~FlowGraphCompiler(); | 273 ~FlowGraphCompiler(); |
| 277 | 274 |
| 278 static bool SupportsUnboxedDoubles(); | 275 static bool SupportsUnboxedDoubles(); |
| 279 static bool SupportsUnboxedMints(); | 276 static bool SupportsUnboxedMints(); |
| 280 static bool SupportsSinCos(); | 277 static bool SupportsSinCos(); |
| 281 static bool SupportsUnboxedSimd128(); | 278 static bool SupportsUnboxedSimd128(); |
| 282 static bool SupportsHardwareDivision(); | 279 static bool SupportsHardwareDivision(); |
| 283 static bool CanConvertUnboxedMintToDouble(); | 280 static bool CanConvertUnboxedMintToDouble(); |
| 284 | 281 |
| 285 static bool IsUnboxedField(const Field& field); | 282 static bool IsUnboxedField(const Field& field); |
| 286 static bool IsPotentialUnboxedField(const Field& field); | 283 static bool IsPotentialUnboxedField(const Field& field); |
| 287 | 284 |
| 288 // Accessors. | 285 // Accessors. |
| 289 Assembler* assembler() const { return assembler_; } | 286 Assembler* assembler() const { return assembler_; } |
| 290 const ParsedFunction& parsed_function() const { return parsed_function_; } | 287 const ParsedFunction& parsed_function() const { return parsed_function_; } |
| 291 const GrowableArray<BlockEntryInstr*>& block_order() const { | 288 const GrowableArray<BlockEntryInstr*>& block_order() const { |
| 292 return block_order_; | 289 return block_order_; |
| 293 } | 290 } |
| 294 | 291 |
| 295 const FlowGraph& flow_graph() const { return flow_graph_; } | 292 const FlowGraph& flow_graph() const { return flow_graph_; } |
| 296 | 293 |
| 297 DescriptorList* pc_descriptors_list() const { | 294 DescriptorList* pc_descriptors_list() const { return pc_descriptors_list_; } |
| 298 return pc_descriptors_list_; | |
| 299 } | |
| 300 BlockEntryInstr* current_block() const { return current_block_; } | 295 BlockEntryInstr* current_block() const { return current_block_; } |
| 301 void set_current_block(BlockEntryInstr* value) { | 296 void set_current_block(BlockEntryInstr* value) { current_block_ = value; } |
| 302 current_block_ = value; | |
| 303 } | |
| 304 static bool CanOptimize(); | 297 static bool CanOptimize(); |
| 305 bool CanOptimizeFunction() const; | 298 bool CanOptimizeFunction() const; |
| 306 bool CanOSRFunction() const; | 299 bool CanOSRFunction() const; |
| 307 bool is_optimizing() const { return is_optimizing_; } | 300 bool is_optimizing() const { return is_optimizing_; } |
| 308 | 301 |
| 309 void EnterIntrinsicMode(); | 302 void EnterIntrinsicMode(); |
| 310 void ExitIntrinsicMode(); | 303 void ExitIntrinsicMode(); |
| 311 bool intrinsic_mode() const { return intrinsic_mode_; } | 304 bool intrinsic_mode() const { return intrinsic_mode_; } |
| 312 | 305 |
| 313 Label* intrinsic_slow_path_label() { | 306 Label* intrinsic_slow_path_label() { return &intrinsic_slow_path_label_; } |
| 314 return &intrinsic_slow_path_label_; | |
| 315 } | |
| 316 | 307 |
| 317 bool ForceSlowPathForStackOverflow() const; | 308 bool ForceSlowPathForStackOverflow() const; |
| 318 | 309 |
| 319 const GrowableArray<BlockInfo*>& block_info() const { return block_info_; } | 310 const GrowableArray<BlockInfo*>& block_info() const { return block_info_; } |
| 320 ParallelMoveResolver* parallel_move_resolver() { | 311 ParallelMoveResolver* parallel_move_resolver() { |
| 321 return ¶llel_move_resolver_; | 312 return ¶llel_move_resolver_; |
| 322 } | 313 } |
| 323 | 314 |
| 324 // Constructor is lightweight, major initialization work should occur here. | 315 // Constructor is lightweight, major initialization work should occur here. |
| 325 // This makes it easier to measure time spent in the compiler. | 316 // This makes it easier to measure time spent in the compiler. |
| 326 void InitCompiler(); | 317 void InitCompiler(); |
| 327 | 318 |
| 328 void CompileGraph(); | 319 void CompileGraph(); |
| 329 | 320 |
| 330 void VisitBlocks(); | 321 void VisitBlocks(); |
| 331 | 322 |
| 332 // Bail out of the flow graph compiler. Does not return to the caller. | 323 // Bail out of the flow graph compiler. Does not return to the caller. |
| 333 void Bailout(const char* reason); | 324 void Bailout(const char* reason); |
| 334 | 325 |
| 335 // Returns 'true' if regular code generation should be skipped. | 326 // Returns 'true' if regular code generation should be skipped. |
| 336 bool TryIntrinsify(); | 327 bool TryIntrinsify(); |
| 337 | 328 |
| 338 void GenerateAssertAssignable(TokenPosition token_pos, | 329 void GenerateAssertAssignable(TokenPosition token_pos, |
| 339 intptr_t deopt_id, | 330 intptr_t deopt_id, |
| 340 const AbstractType& dst_type, | 331 const AbstractType& dst_type, |
| 341 const String& dst_name, | 332 const String& dst_name, |
| 342 LocationSummary* locs); | 333 LocationSummary* locs); |
| 343 | 334 |
| 344 // DBC emits calls very differently from all other architectures due to its | 335 // DBC emits calls very differently from all other architectures due to its |
| 345 // interpreted nature. | 336 // interpreted nature. |
| 346 #if !defined(TARGET_ARCH_DBC) | 337 #if !defined(TARGET_ARCH_DBC) |
| 347 void GenerateRuntimeCall(TokenPosition token_pos, | 338 void GenerateRuntimeCall(TokenPosition token_pos, |
| 348 intptr_t deopt_id, | 339 intptr_t deopt_id, |
| 349 const RuntimeEntry& entry, | 340 const RuntimeEntry& entry, |
| 350 intptr_t argument_count, | 341 intptr_t argument_count, |
| 351 LocationSummary* locs); | 342 LocationSummary* locs); |
| 352 | 343 |
| 353 void GenerateCall(TokenPosition token_pos, | 344 void GenerateCall(TokenPosition token_pos, |
| 354 const StubEntry& stub_entry, | 345 const StubEntry& stub_entry, |
| 355 RawPcDescriptors::Kind kind, | 346 RawPcDescriptors::Kind kind, |
| (...skipping 31 matching lines...) |
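
The `SlowPathCode` contract shown above is: `EmitNativeCode` must bind `entry_label_` (the point the fast path jumps to on failure) and typically branches back through `exit_label_`; `GenerateCode` enforces the first half with its `IsBound()` assert. A hypothetical subclass, just to show the shape — the class name and the runtime-call placeholder are illustrative, and the unconditional-jump spelling varies by architecture, so read the assembler calls as pseudo-assembler rather than actual VM API:

```cpp
// Hypothetical slow path: the fast path jumps to entry_label() when an
// inline stack-overflow check fails; control resumes through exit_label().
class CheckStackOverflowSlowPath : public SlowPathCode {
 private:
  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    Assembler* assembler = compiler->assembler();
    assembler->Bind(entry_label());   // satisfies the IsBound() assert
    // ... save live state, call the stack-overflow runtime entry ...
    assembler->Jump(exit_label());    // resume the fast path (illustrative)
  }
};
```
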
| 387 LocationSummary* locs, | 378 LocationSummary* locs, |
| 388 const ICData& ic_data); | 379 const ICData& ic_data); |
| 389 | 380 |
| 390 void GenerateNumberTypeCheck(Register kClassIdReg, | 381 void GenerateNumberTypeCheck(Register kClassIdReg, |
| 391 const AbstractType& type, | 382 const AbstractType& type, |
| 392 Label* is_instance_lbl, | 383 Label* is_instance_lbl, |
| 393 Label* is_not_instance_lbl); | 384 Label* is_not_instance_lbl); |
| 394 void GenerateStringTypeCheck(Register kClassIdReg, | 385 void GenerateStringTypeCheck(Register kClassIdReg, |
| 395 Label* is_instance_lbl, | 386 Label* is_instance_lbl, |
| 396 Label* is_not_instance_lbl); | 387 Label* is_not_instance_lbl); |
| 397 void GenerateListTypeCheck(Register kClassIdReg, | 388 void GenerateListTypeCheck(Register kClassIdReg, Label* is_instance_lbl); |
| 398 Label* is_instance_lbl); | |
| 399 | 389 |
| 400 void EmitOptimizedInstanceCall(const StubEntry& stub_entry, | 390 void EmitOptimizedInstanceCall(const StubEntry& stub_entry, |
| 401 const ICData& ic_data, | 391 const ICData& ic_data, |
| 402 intptr_t argument_count, | 392 intptr_t argument_count, |
| 403 intptr_t deopt_id, | 393 intptr_t deopt_id, |
| 404 TokenPosition token_pos, | 394 TokenPosition token_pos, |
| 405 LocationSummary* locs); | 395 LocationSummary* locs); |
| 406 | 396 |
| 407 void EmitInstanceCall(const StubEntry& stub_entry, | 397 void EmitInstanceCall(const StubEntry& stub_entry, |
| 408 const ICData& ic_data, | 398 const ICData& ic_data, |
| 409 intptr_t argument_count, | 399 intptr_t argument_count, |
| 410 intptr_t deopt_id, | 400 intptr_t deopt_id, |
| 411 TokenPosition token_pos, | 401 TokenPosition token_pos, |
| 412 LocationSummary* locs); | 402 LocationSummary* locs); |
| 413 | 403 |
| 414 void EmitPolymorphicInstanceCall(const ICData& ic_data, | 404 void EmitPolymorphicInstanceCall(const ICData& ic_data, |
| 415 intptr_t argument_count, | 405 intptr_t argument_count, |
| 416 const Array& argument_names, | 406 const Array& argument_names, |
| 417 intptr_t deopt_id, | 407 intptr_t deopt_id, |
| 418 TokenPosition token_pos, | 408 TokenPosition token_pos, |
| 419 LocationSummary* locs, | 409 LocationSummary* locs, |
| 420 bool complete); | 410 bool complete); |
| 421 | 411 |
| 422 // Pass a value for try-index where block is not available (e.g. slow path). | 412 // Pass a value for try-index where block is not available (e.g. slow path). |
| 423 void EmitMegamorphicInstanceCall( | 413 void EmitMegamorphicInstanceCall(const ICData& ic_data, |
| 424 const ICData& ic_data, | 414 intptr_t argument_count, |
| 425 intptr_t argument_count, | 415 intptr_t deopt_id, |
| 426 intptr_t deopt_id, | 416 TokenPosition token_pos, |
| 427 TokenPosition token_pos, | 417 LocationSummary* locs, |
| 428 LocationSummary* locs, | 418 intptr_t try_index, |
| 429 intptr_t try_index, | 419 intptr_t slow_path_argument_count = 0); |
| 430 intptr_t slow_path_argument_count = 0); | |
| 431 | 420 |
| 432 void EmitSwitchableInstanceCall(const ICData& ic_data, | 421 void EmitSwitchableInstanceCall(const ICData& ic_data, |
| 433 intptr_t argument_count, | 422 intptr_t argument_count, |
| 434 intptr_t deopt_id, | 423 intptr_t deopt_id, |
| 435 TokenPosition token_pos, | 424 TokenPosition token_pos, |
| 436 LocationSummary* locs); | 425 LocationSummary* locs); |
| 437 | 426 |
| 438 void EmitTestAndCall(const ICData& ic_data, | 427 void EmitTestAndCall(const ICData& ic_data, |
| 439 intptr_t arg_count, | 428 intptr_t arg_count, |
| 440 const Array& arg_names, | 429 const Array& arg_names, |
| (...skipping 123 matching lines...) |
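
`EmitPolymorphicInstanceCall` and `EmitTestAndCall` above lower a call site with a small set of receiver classes into a cascade of class-id tests, conventionally ordered so the hottest class (highest count in the `ICData`) is tested first, with a fallback when nothing matches. The run-time effect of that cascade, written as ordinary self-contained C++ with stand-in types (every name here is illustrative; compare with the `CidTarget` struct earlier in this header):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Stand-ins for VM notions: an object carries a class id, and each entry
// pairs a class id with a call target and an observed call count.
struct Obj { intptr_t cid; };
struct CidCount { intptr_t cid; const char* target; intptr_t count; };

// Test the receiver's class id against each recorded cid, hottest first;
// fall back (e.g. to a megamorphic lookup or a deopt) when none match.
const char* Dispatch(const std::vector<CidCount>& targets, const Obj& recv) {
  for (const auto& t : targets) {   // assumed sorted by descending count
    if (t.cid == recv.cid) return t.target;
  }
  return "fallback";
}

int main() {
  std::vector<CidCount> targets = {{57, "Smi_plus", 900}, {61, "Dbl_plus", 3}};
  std::printf("%s\n", Dispatch(targets, Obj{57}));  // -> Smi_plus
}
```
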
| 564 Thread* thread() const { return thread_; } | 553 Thread* thread() const { return thread_; } |
| 565 Isolate* isolate() const { return thread_->isolate(); } | 554 Isolate* isolate() const { return thread_->isolate(); } |
| 566 Zone* zone() const { return zone_; } | 555 Zone* zone() const { return zone_; } |
| 567 | 556 |
| 568 void AddStubCallTarget(const Code& code); | 557 void AddStubCallTarget(const Code& code); |
| 569 | 558 |
| 570 const Array& inlined_code_intervals() const { | 559 const Array& inlined_code_intervals() const { |
| 571 return inlined_code_intervals_; | 560 return inlined_code_intervals_; |
| 572 } | 561 } |
| 573 | 562 |
| 574 RawArray* edge_counters_array() const { | 563 RawArray* edge_counters_array() const { return edge_counters_array_.raw(); } |
| 575 return edge_counters_array_.raw(); | |
| 576 } | |
| 577 | 564 |
| 578 RawArray* InliningIdToFunction() const; | 565 RawArray* InliningIdToFunction() const; |
| 579 RawArray* InliningIdToTokenPos() const; | 566 RawArray* InliningIdToTokenPos() const; |
| 580 RawArray* CallerInliningIdMap() const; | 567 RawArray* CallerInliningIdMap() const; |
| 581 | 568 |
| 582 CodeSourceMapBuilder* code_source_map_builder() { | 569 CodeSourceMapBuilder* code_source_map_builder() { |
| 583 if (code_source_map_builder_ == NULL) { | 570 if (code_source_map_builder_ == NULL) { |
| 584 code_source_map_builder_ = new CodeSourceMapBuilder(); | 571 code_source_map_builder_ = new CodeSourceMapBuilder(); |
| 585 } | 572 } |
| 586 ASSERT(code_source_map_builder_ != NULL); | 573 ASSERT(code_source_map_builder_ != NULL); |
| (...skipping 30 matching lines...) |
| 617 intptr_t deopt_id, | 604 intptr_t deopt_id, |
| 618 TokenPosition token_pos, | 605 TokenPosition token_pos, |
| 619 LocationSummary* locs); | 606 LocationSummary* locs); |
| 620 | 607 |
| 621 void EmitUnoptimizedStaticCall(intptr_t argument_count, | 608 void EmitUnoptimizedStaticCall(intptr_t argument_count, |
| 622 intptr_t deopt_id, | 609 intptr_t deopt_id, |
| 623 TokenPosition token_pos, | 610 TokenPosition token_pos, |
| 624 LocationSummary* locs, | 611 LocationSummary* locs, |
| 625 const ICData& ic_data); | 612 const ICData& ic_data); |
| 626 | 613 |
| 627 // DBC handles type tests differently from all other architectures due | 614 // DBC handles type tests differently from all other architectures due |
| 628 // to its interpreted nature. | 615 // to its interpreted nature. |
| 629 #if !defined(TARGET_ARCH_DBC) | 616 #if !defined(TARGET_ARCH_DBC) |
| 630 // Type checking helper methods. | 617 // Type checking helper methods. |
| 631 void CheckClassIds(Register class_id_reg, | 618 void CheckClassIds(Register class_id_reg, |
| 632 const GrowableArray<intptr_t>& class_ids, | 619 const GrowableArray<intptr_t>& class_ids, |
| 633 Label* is_instance_lbl, | 620 Label* is_instance_lbl, |
| 634 Label* is_not_instance_lbl); | 621 Label* is_not_instance_lbl); |
| 635 | 622 |
| 636 RawSubtypeTestCache* GenerateInlineInstanceof(TokenPosition token_pos, | 623 RawSubtypeTestCache* GenerateInlineInstanceof(TokenPosition token_pos, |
| 637 const AbstractType& type, | 624 const AbstractType& type, |
| 638 Label* is_instance_lbl, | 625 Label* is_instance_lbl, |
| (...skipping 64 matching lines...) |
| 703 | 690 |
| 704 intptr_t GetOptimizationThreshold() const; | 691 intptr_t GetOptimizationThreshold() const; |
| 705 | 692 |
| 706 StackmapTableBuilder* stackmap_table_builder() { | 693 StackmapTableBuilder* stackmap_table_builder() { |
| 707 if (stackmap_table_builder_ == NULL) { | 694 if (stackmap_table_builder_ == NULL) { |
| 708 stackmap_table_builder_ = new StackmapTableBuilder(); | 695 stackmap_table_builder_ = new StackmapTableBuilder(); |
| 709 } | 696 } |
| 710 return stackmap_table_builder_; | 697 return stackmap_table_builder_; |
| 711 } | 698 } |
| 712 | 699 |
| 713 // TODO(vegorov) re-enable frame state tracking on DBC. It is | 700 // TODO(vegorov) re-enable frame state tracking on DBC. It is |
| 714 // currently disabled because it relies on LocationSummaries and | 701 // currently disabled because it relies on LocationSummaries and |
| 715 // we don't use them during unoptimized compilation on DBC. | 702 // we don't use them during unoptimized compilation on DBC. |
| 716 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) | 703 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) |
| 717 void FrameStateUpdateWith(Instruction* instr); | 704 void FrameStateUpdateWith(Instruction* instr); |
| 718 void FrameStatePush(Definition* defn); | 705 void FrameStatePush(Definition* defn); |
| 719 void FrameStatePop(intptr_t count); | 706 void FrameStatePop(intptr_t count); |
| 720 bool FrameStateIsSafeToCall(); | 707 bool FrameStateIsSafeToCall(); |
| 721 void FrameStateClear(); | 708 void FrameStateClear(); |
| 722 #endif | 709 #endif |
| 723 | 710 |
| 724 // This struct contains either function or code, the other one being NULL. | 711 // This struct contains either function or code, the other one being NULL. |
| 725 class StaticCallsStruct : public ZoneAllocated { | 712 class StaticCallsStruct : public ZoneAllocated { |
| (...skipping 70 matching lines...) |
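
The DEBUG-only frame-state hooks above (`FrameStateUpdateWith`, `FrameStatePush`, `FrameStatePop`, `FrameStateIsSafeToCall`, `FrameStateClear`) maintain a model of the expression stack so the compiler can assert that, for example, a call is only emitted in a call-safe state. A minimal sketch of what such a model tracks, under the simplifying assumption that safety means an empty modeled stack (the real criteria are richer):

```cpp
#include <cassert>

// Debug-only bookkeeping sketch: mirror pushes and pops of the value stack
// and expose a safety predicate to assert before emitting calls.
class FrameStateModel {
 public:
  void Push() { depth_++; }
  void Pop(int count) { assert(depth_ >= count); depth_ -= count; }
  bool IsSafeToCall() const { return depth_ == 0; }  // assumed criterion
  void Clear() { depth_ = 0; }
 private:
  int depth_ = 0;
};
```
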
| 796 const GrowableArray<const Function*>& inline_id_to_function_; | 783 const GrowableArray<const Function*>& inline_id_to_function_; |
| 797 const GrowableArray<TokenPosition>& inline_id_to_token_pos_; | 784 const GrowableArray<TokenPosition>& inline_id_to_token_pos_; |
| 798 const GrowableArray<intptr_t>& caller_inline_id_; | 785 const GrowableArray<intptr_t>& caller_inline_id_; |
| 799 | 786 |
| 800 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); | 787 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); |
| 801 }; | 788 }; |
| 802 | 789 |
| 803 } // namespace dart | 790 } // namespace dart |
| 804 | 791 |
| 805 #endif // RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 792 #endif // RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |
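
Finally, tying the public interface together: the comment on `InitCompiler()` notes that the constructor is deliberately lightweight so that time spent compiling can be measured around the later calls. A sketch of the implied driver order, using only the constructor and methods declared in this header (the surrounding setup and VM includes are assumed, and this is not the actual pipeline code):

```cpp
// Assumed driver shape: cheap construction, then explicit initialization,
// then code generation for the whole graph.
void CompileWithFlowGraphCompiler(
    Assembler* assembler,
    FlowGraph* flow_graph,
    const ParsedFunction& parsed_function,
    const GrowableArray<const Function*>& inline_id_to_function,
    const GrowableArray<TokenPosition>& inline_id_to_token_pos,
    const GrowableArray<intptr_t>& caller_inline_id) {
  FlowGraphCompiler compiler(assembler, flow_graph, parsed_function,
                             /*is_optimizing=*/true, inline_id_to_function,
                             inline_id_to_token_pos, caller_inline_id);
  // A timing scope would open here; construction above did no major work.
  compiler.InitCompiler();    // heavy initialization, per the comment above
  compiler.CompileGraph();    // drives VisitBlocks() and emits the code
}
```
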