OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 5 #ifndef RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |
6 #define RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 6 #define RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |
7 | 7 |
8 #include "vm/allocation.h" | 8 #include "vm/allocation.h" |
9 #include "vm/assembler.h" | 9 #include "vm/assembler.h" |
10 #include "vm/code_descriptors.h" | 10 #include "vm/code_descriptors.h" |
(...skipping 460 matching lines...)
471 bool CanFallThroughTo(BlockEntryInstr* block_entry) const; | 471 bool CanFallThroughTo(BlockEntryInstr* block_entry) const; |
472 | 472 |
473 // Return true-, false- and fall-through label for a branch instruction. | 473 // Return true-, false- and fall-through label for a branch instruction. |
474 BranchLabels CreateBranchLabels(BranchInstr* branch) const; | 474 BranchLabels CreateBranchLabels(BranchInstr* branch) const; |
475 | 475 |
476 void AddExceptionHandler(intptr_t try_index, | 476 void AddExceptionHandler(intptr_t try_index, |
477 intptr_t outer_try_index, | 477 intptr_t outer_try_index, |
478 intptr_t pc_offset, | 478 intptr_t pc_offset, |
479 const Array& handler_types, | 479 const Array& handler_types, |
480 bool needs_stacktrace); | 480 bool needs_stacktrace); |
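Note (illustrative, not part of this CL): AddExceptionHandler is what a catch-block entry calls while emitting code. A minimal sketch of such a call site, assuming a CatchBlockEntryInstr-style instruction; the accessor names on catch_entry are assumptions, not quoted from this file:

    // Sketch: register the handler that starts at the current PC offset.
    compiler->AddExceptionHandler(catch_entry->catch_try_index(),
                                  catch_entry->try_index(),
                                  compiler->assembler()->CodeSize(),
                                  catch_entry->catch_handler_types(),
                                  catch_entry->needs_stacktrace());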
481 void SetNeedsStacktrace(intptr_t try_index); | 481 void SetNeedsStackTrace(intptr_t try_index); |
482 void AddCurrentDescriptor(RawPcDescriptors::Kind kind, | 482 void AddCurrentDescriptor(RawPcDescriptors::Kind kind, |
483 intptr_t deopt_id, | 483 intptr_t deopt_id, |
484 TokenPosition token_pos); | 484 TokenPosition token_pos); |
485 | 485 |
486 void RecordSafepoint(LocationSummary* locs, | 486 void RecordSafepoint(LocationSummary* locs, |
487 intptr_t slow_path_argument_count = 0); | 487 intptr_t slow_path_argument_count = 0); |
488 | 488 |
489 Label* AddDeoptStub(intptr_t deopt_id, | 489 Label* AddDeoptStub(intptr_t deopt_id, |
490 ICData::DeoptReasonId reason, | 490 ICData::DeoptReasonId reason, |
491 uint32_t flags = 0); | 491 uint32_t flags = 0); |
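Note (illustrative, not part of this CL): AddDeoptStub returns a Label* whose stub code is emitted later alongside the other deopt stubs. A hedged sketch of typical use on ia32/x64; value_reg, instr, and the choice of deopt reason are assumptions:

    // Sketch: branch to a lazily emitted deoptimization stub when the
    // Smi guard fails.
    Label* deopt =
        compiler->AddDeoptStub(instr->deopt_id(), ICData::kDeoptCheckSmi);
    __ testl(value_reg, Immediate(kSmiTagMask));
    __ j(NOT_ZERO, deopt);  // Tag bit set: not a Smi, so deoptimize.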
492 | 492 |
493 #if defined(TARGET_ARCH_DBC) | 493 #if defined(TARGET_ARCH_DBC) |
494 void EmitDeopt(intptr_t deopt_id, | 494 void EmitDeopt(intptr_t deopt_id, |
495 ICData::DeoptReasonId reason, | 495 ICData::DeoptReasonId reason, |
496 uint32_t flags = 0); | 496 uint32_t flags = 0); |
497 | 497 |
498 // If the cid does not fit in 16 bits, then this will cause a bailout. | 498 // If the cid does not fit in 16 bits, then this will cause a bailout. |
499 uint16_t ToEmbeddableCid(intptr_t cid, Instruction* instruction); | 499 uint16_t ToEmbeddableCid(intptr_t cid, Instruction* instruction); |
500 #endif // defined(TARGET_ARCH_DBC) | 500 #endif // defined(TARGET_ARCH_DBC) |
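Note (illustrative, not part of this CL): the 16-bit limit above comes from the DBC operand encoding. A hedged sketch of the check ToEmbeddableCid presumably performs inside the compiler; the bailout message is an assumption:

    // Sketch: a cid above 0xFFFF cannot be encoded in a 16-bit DBC
    // operand, so the compilation bails out instead of emitting it.
    if (!Utils::IsUint(16, cid)) {
      Bailout("cid too large for a 16-bit DBC operand");  // assumed message
    }
    return static_cast<uint16_t>(cid);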
501 | 501 |
502 void AddDeoptIndexAtCall(intptr_t deopt_id); | 502 void AddDeoptIndexAtCall(intptr_t deopt_id); |
503 | 503 |
504 void AddSlowPathCode(SlowPathCode* slow_path); | 504 void AddSlowPathCode(SlowPathCode* slow_path); |
505 | 505 |
506 void FinalizeExceptionHandlers(const Code& code); | 506 void FinalizeExceptionHandlers(const Code& code); |
507 void FinalizePcDescriptors(const Code& code); | 507 void FinalizePcDescriptors(const Code& code); |
508 RawArray* CreateDeoptInfo(Assembler* assembler); | 508 RawArray* CreateDeoptInfo(Assembler* assembler); |
509 void FinalizeStackmaps(const Code& code); | 509 void FinalizeStackMaps(const Code& code); |
510 void FinalizeVarDescriptors(const Code& code); | 510 void FinalizeVarDescriptors(const Code& code); |
511 void FinalizeStaticCallTargetsTable(const Code& code); | 511 void FinalizeStaticCallTargetsTable(const Code& code); |
512 | 512 |
513 const Class& double_class() const { return double_class_; } | 513 const Class& double_class() const { return double_class_; } |
514 const Class& mint_class() const { return mint_class_; } | 514 const Class& mint_class() const { return mint_class_; } |
515 const Class& float32x4_class() const { return float32x4_class_; } | 515 const Class& float32x4_class() const { return float32x4_class_; } |
516 const Class& float64x2_class() const { return float64x2_class_; } | 516 const Class& float64x2_class() const { return float64x2_class_; } |
517 const Class& int32x4_class() const { return int32x4_class_; } | 517 const Class& int32x4_class() const { return int32x4_class_; } |
518 | 518 |
519 const Class& BoxClassFor(Representation rep); | 519 const Class& BoxClassFor(Representation rep); |
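Note (illustrative, not part of this CL): BoxClassFor presumably maps an unboxed representation to the matching box class exposed by the accessors above. A hedged sketch of that mapping, not the actual body in flow_graph_compiler.cc:

    // Sketch of a plausible BoxClassFor() body (assumption).
    switch (rep) {
      case kUnboxedDouble:    return double_class();
      case kUnboxedMint:      return mint_class();
      case kUnboxedFloat32x4: return float32x4_class();
      case kUnboxedFloat64x2: return float64x2_class();
      case kUnboxedInt32x4:   return int32x4_class();
      default:
        UNREACHABLE();
        return Class::ZoneHandle();
    }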
(...skipping 172 matching lines...)
692 void CompactBlocks(); | 692 void CompactBlocks(); |
693 | 693 |
694 bool IsListClass(const Class& cls) const { | 694 bool IsListClass(const Class& cls) const { |
695 return cls.raw() == list_class_.raw(); | 695 return cls.raw() == list_class_.raw(); |
696 } | 696 } |
697 | 697 |
698 void EmitSourceLine(Instruction* instr); | 698 void EmitSourceLine(Instruction* instr); |
699 | 699 |
700 intptr_t GetOptimizationThreshold() const; | 700 intptr_t GetOptimizationThreshold() const; |
701 | 701 |
702 StackmapTableBuilder* stackmap_table_builder() { | 702 StackMapTableBuilder* stackmap_table_builder() { |
703 if (stackmap_table_builder_ == NULL) { | 703 if (stackmap_table_builder_ == NULL) { |
704 stackmap_table_builder_ = new StackmapTableBuilder(); | 704 stackmap_table_builder_ = new StackMapTableBuilder(); |
705 } | 705 } |
706 return stackmap_table_builder_; | 706 return stackmap_table_builder_; |
707 } | 707 } |
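Note (illustrative, not part of this CL): the accessor above allocates the StackMapTableBuilder lazily, so functions that never record a safepoint never pay for one. A hedged sketch of how a safepoint record might use it; the AddEntry argument list and the local names are assumptions:

    // Sketch: record the live-pointer bitmap at the current PC offset.
    BitmapBuilder* bitmap = locs->stack_bitmap();
    stackmap_table_builder()->AddEntry(assembler()->CodeSize(),
                                       bitmap,
                                       register_bit_count /* assumed */);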
708 | 708 |
709 // TODO(vegorov) re-enable frame state tracking on DBC. It is | 709 // TODO(vegorov) re-enable frame state tracking on DBC. It is |
710 // currently disabled because it relies on LocationSummaries and | 710 // currently disabled because it relies on LocationSummaries and |
711 // we don't use them during unoptimized compilation on DBC. | 711 // we don't use them during unoptimized compilation on DBC. |
712 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) | 712 #if defined(DEBUG) && !defined(TARGET_ARCH_DBC) |
713 void FrameStateUpdateWith(Instruction* instr); | 713 void FrameStateUpdateWith(Instruction* instr); |
714 void FrameStatePush(Definition* defn); | 714 void FrameStatePush(Definition* defn); |
(...skipping 30 matching lines...)
745 #if defined(DEBUG) | 745 #if defined(DEBUG) |
746 GrowableArray<Representation> frame_state_; | 746 GrowableArray<Representation> frame_state_; |
747 #endif | 747 #endif |
748 | 748 |
749 // Compiler specific per-block state. Indexed by postorder block number | 749 // Compiler specific per-block state. Indexed by postorder block number |
750 // for convenience. This is not the block's index in the block order, | 750 // for convenience. This is not the block's index in the block order, |
751 // which is reverse postorder. | 751 // which is reverse postorder. |
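Note (illustrative, not part of this CL): the indexing convention in the comment above means per-block lookups go through the block's postorder number rather than its position in the codegen block order. A one-line sketch, assuming the usual BlockEntryInstr::postorder_number() accessor:

    // Sketch: per-block compiler state is keyed by postorder number,
    // while block_order()[i] walks blocks in reverse postorder.
    BlockInfo* info = block_info_[block->postorder_number()];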
752 BlockEntryInstr* current_block_; | 752 BlockEntryInstr* current_block_; |
753 ExceptionHandlerList* exception_handlers_list_; | 753 ExceptionHandlerList* exception_handlers_list_; |
754 DescriptorList* pc_descriptors_list_; | 754 DescriptorList* pc_descriptors_list_; |
755 StackmapTableBuilder* stackmap_table_builder_; | 755 StackMapTableBuilder* stackmap_table_builder_; |
756 CodeSourceMapBuilder* code_source_map_builder_; | 756 CodeSourceMapBuilder* code_source_map_builder_; |
757 intptr_t saved_code_size_; | 757 intptr_t saved_code_size_; |
758 GrowableArray<BlockInfo*> block_info_; | 758 GrowableArray<BlockInfo*> block_info_; |
759 GrowableArray<CompilerDeoptInfo*> deopt_infos_; | 759 GrowableArray<CompilerDeoptInfo*> deopt_infos_; |
760 GrowableArray<SlowPathCode*> slow_path_code_; | 760 GrowableArray<SlowPathCode*> slow_path_code_; |
761 // Stores static call targets as well as stub targets. | 761 // Stores static call targets as well as stub targets. |
762 // TODO(srdjan): Evaluate if we should store allocation stub targets into a | 762 // TODO(srdjan): Evaluate if we should store allocation stub targets into a |
763 // separate table? | 763 // separate table? |
764 GrowableArray<StaticCallsStruct*> static_calls_target_table_; | 764 GrowableArray<StaticCallsStruct*> static_calls_target_table_; |
765 const bool is_optimizing_; | 765 const bool is_optimizing_; |
(...skipping 26 matching lines...)
792 const GrowableArray<const Function*>& inline_id_to_function_; | 792 const GrowableArray<const Function*>& inline_id_to_function_; |
793 const GrowableArray<TokenPosition>& inline_id_to_token_pos_; | 793 const GrowableArray<TokenPosition>& inline_id_to_token_pos_; |
794 const GrowableArray<intptr_t>& caller_inline_id_; | 794 const GrowableArray<intptr_t>& caller_inline_id_; |
795 | 795 |
796 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); | 796 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler); |
797 }; | 797 }; |
798 | 798 |
799 } // namespace dart | 799 } // namespace dart |
800 | 800 |
801 #endif // RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ | 801 #endif // RUNTIME_VM_FLOW_GRAPH_COMPILER_H_ |