Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(517)

Side by Side Diff: runtime/vm/intermediate_language_x64.cc

Issue 2584613002: PATCH (not to be committed): Support for printing instruction statistics
Patch Set: Fixed polymorphic call inside try, added more tags for remaining unknown code Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « runtime/vm/intermediate_language_arm.cc ('k') | runtime/vm/object.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/compiler.h" 10 #include "vm/compiler.h"
(...skipping 1708 matching lines...) Expand 10 before | Expand all | Expand 10 after
1719 1719
1720 1720
1721 class BoxAllocationSlowPath : public SlowPathCode { 1721 class BoxAllocationSlowPath : public SlowPathCode {
1722 public: 1722 public:
1723 BoxAllocationSlowPath(Instruction* instruction, 1723 BoxAllocationSlowPath(Instruction* instruction,
1724 const Class& cls, 1724 const Class& cls,
1725 Register result) 1725 Register result)
1726 : instruction_(instruction), cls_(cls), result_(result) {} 1726 : instruction_(instruction), cls_(cls), result_(result) {}
1727 1727
1728 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 1728 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
1729 compiler->SpecialStatsBegin(
1730 CombinedCodeStatistics::kTagBoxAllocationSlowPath);
1729 if (Assembler::EmittingComments()) { 1731 if (Assembler::EmittingComments()) {
1730 __ Comment("%s slow path allocation of %s", instruction_->DebugName(), 1732 __ Comment("%s slow path allocation of %s", instruction_->DebugName(),
1731 String::Handle(cls_.ScrubbedName()).ToCString()); 1733 String::Handle(cls_.ScrubbedName()).ToCString());
1732 } 1734 }
1733 __ Bind(entry_label()); 1735 __ Bind(entry_label());
1734 const Code& stub = Code::ZoneHandle( 1736 const Code& stub = Code::ZoneHandle(
1735 compiler->zone(), StubCode::GetAllocationStubForClass(cls_)); 1737 compiler->zone(), StubCode::GetAllocationStubForClass(cls_));
1736 const StubEntry stub_entry(stub); 1738 const StubEntry stub_entry(stub);
1737 1739
1738 LocationSummary* locs = instruction_->locs(); 1740 LocationSummary* locs = instruction_->locs();
1739 1741
1740 locs->live_registers()->Remove(Location::RegisterLocation(result_)); 1742 locs->live_registers()->Remove(Location::RegisterLocation(result_));
1741 1743
1742 compiler->SaveLiveRegisters(locs); 1744 compiler->SaveLiveRegisters(locs);
1743 compiler->GenerateCall(TokenPosition::kNoSource, // No token position. 1745 compiler->GenerateCall(TokenPosition::kNoSource, // No token position.
1744 stub_entry, RawPcDescriptors::kOther, locs); 1746 stub_entry, RawPcDescriptors::kOther, locs);
1745 compiler->AddStubCallTarget(stub); 1747 compiler->AddStubCallTarget(stub);
1746 __ MoveRegister(result_, RAX); 1748 __ MoveRegister(result_, RAX);
1747 compiler->RestoreLiveRegisters(locs); 1749 compiler->RestoreLiveRegisters(locs);
1748 __ jmp(exit_label()); 1750 __ jmp(exit_label());
1751 compiler->SpecialStatsEnd(
1752 CombinedCodeStatistics::kTagBoxAllocationSlowPath);
1749 } 1753 }
1750 1754
1751 static void Allocate(FlowGraphCompiler* compiler, 1755 static void Allocate(FlowGraphCompiler* compiler,
1752 Instruction* instruction, 1756 Instruction* instruction,
1753 const Class& cls, 1757 const Class& cls,
1754 Register result, 1758 Register result,
1755 Register temp) { 1759 Register temp) {
1756 if (compiler->intrinsic_mode()) { 1760 if (compiler->intrinsic_mode()) {
1757 __ TryAllocate(cls, compiler->intrinsic_slow_path_label(), 1761 __ TryAllocate(cls, compiler->intrinsic_slow_path_label(),
1758 Assembler::kFarJump, result, temp); 1762 Assembler::kFarJump, result, temp);
(...skipping 638 matching lines...) Expand 10 before | Expand all | Expand 10 after
2397 } 2401 }
2398 2402
2399 2403
2400 class AllocateContextSlowPath : public SlowPathCode { 2404 class AllocateContextSlowPath : public SlowPathCode {
2401 public: 2405 public:
2402 explicit AllocateContextSlowPath( 2406 explicit AllocateContextSlowPath(
2403 AllocateUninitializedContextInstr* instruction) 2407 AllocateUninitializedContextInstr* instruction)
2404 : instruction_(instruction) {} 2408 : instruction_(instruction) {}
2405 2409
2406 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2410 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2411 compiler->SpecialStatsBegin(
2412 CombinedCodeStatistics::kTagAllocateContextSlowPath);
2407 __ Comment("AllocateContextSlowPath"); 2413 __ Comment("AllocateContextSlowPath");
2408 __ Bind(entry_label()); 2414 __ Bind(entry_label());
2409 2415
2410 LocationSummary* locs = instruction_->locs(); 2416 LocationSummary* locs = instruction_->locs();
2411 locs->live_registers()->Remove(locs->out(0)); 2417 locs->live_registers()->Remove(locs->out(0));
2412 2418
2413 compiler->SaveLiveRegisters(locs); 2419 compiler->SaveLiveRegisters(locs);
2414 2420
2415 __ LoadImmediate(R10, Immediate(instruction_->num_context_variables())); 2421 __ LoadImmediate(R10, Immediate(instruction_->num_context_variables()));
2416 const Code& stub = Code::ZoneHandle( 2422 const Code& stub = Code::ZoneHandle(
2417 compiler->zone(), StubCode::AllocateContext_entry()->code()); 2423 compiler->zone(), StubCode::AllocateContext_entry()->code());
2418 compiler->AddStubCallTarget(stub); 2424 compiler->AddStubCallTarget(stub);
2419 compiler->GenerateCall(instruction_->token_pos(), 2425 compiler->GenerateCall(instruction_->token_pos(),
2420 *StubCode::AllocateContext_entry(), 2426 *StubCode::AllocateContext_entry(),
2421 RawPcDescriptors::kOther, locs); 2427 RawPcDescriptors::kOther, locs);
2422 ASSERT(instruction_->locs()->out(0).reg() == RAX); 2428 ASSERT(instruction_->locs()->out(0).reg() == RAX);
2423 compiler->RestoreLiveRegisters(instruction_->locs()); 2429 compiler->RestoreLiveRegisters(instruction_->locs());
2424 __ jmp(exit_label()); 2430 __ jmp(exit_label());
2431 compiler->SpecialStatsEnd(
2432 CombinedCodeStatistics::kTagAllocateContextSlowPath);
2425 } 2433 }
2426 2434
2427 private: 2435 private:
2428 AllocateUninitializedContextInstr* instruction_; 2436 AllocateUninitializedContextInstr* instruction_;
2429 }; 2437 };
2430 2438
2431 2439
2432 void AllocateUninitializedContextInstr::EmitNativeCode( 2440 void AllocateUninitializedContextInstr::EmitNativeCode(
2433 FlowGraphCompiler* compiler) { 2441 FlowGraphCompiler* compiler) {
2434 ASSERT(compiler->is_optimizing()); 2442 ASSERT(compiler->is_optimizing());
(...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after
2624 return summary; 2632 return summary;
2625 } 2633 }
2626 2634
2627 2635
2628 class CheckStackOverflowSlowPath : public SlowPathCode { 2636 class CheckStackOverflowSlowPath : public SlowPathCode {
2629 public: 2637 public:
2630 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) 2638 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
2631 : instruction_(instruction) {} 2639 : instruction_(instruction) {}
2632 2640
2633 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2641 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2642 compiler->SpecialStatsBegin(
2643 CombinedCodeStatistics::kTagCheckStackOverflowSlowPath);
2634 if (FLAG_use_osr && osr_entry_label()->IsLinked()) { 2644 if (FLAG_use_osr && osr_entry_label()->IsLinked()) {
2635 __ Comment("CheckStackOverflowSlowPathOsr"); 2645 __ Comment("CheckStackOverflowSlowPathOsr");
2636 __ Bind(osr_entry_label()); 2646 __ Bind(osr_entry_label());
2637 __ movq(Address(THR, Thread::stack_overflow_flags_offset()), 2647 __ movq(Address(THR, Thread::stack_overflow_flags_offset()),
2638 Immediate(Thread::kOsrRequest)); 2648 Immediate(Thread::kOsrRequest));
2639 } 2649 }
2640 __ Comment("CheckStackOverflowSlowPath"); 2650 __ Comment("CheckStackOverflowSlowPath");
2641 __ Bind(entry_label()); 2651 __ Bind(entry_label());
2642 compiler->SaveLiveRegisters(instruction_->locs()); 2652 compiler->SaveLiveRegisters(instruction_->locs());
2643 // pending_deoptimization_env_ is needed to generate a runtime call that 2653 // pending_deoptimization_env_ is needed to generate a runtime call that
2644 // may throw an exception. 2654 // may throw an exception.
2645 ASSERT(compiler->pending_deoptimization_env_ == NULL); 2655 ASSERT(compiler->pending_deoptimization_env_ == NULL);
2646 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); 2656 Environment* env = compiler->SlowPathEnvironmentFor(instruction_);
2647 compiler->pending_deoptimization_env_ = env; 2657 compiler->pending_deoptimization_env_ = env;
2648 compiler->GenerateRuntimeCall( 2658 compiler->GenerateRuntimeCall(
2649 instruction_->token_pos(), instruction_->deopt_id(), 2659 instruction_->token_pos(), instruction_->deopt_id(),
2650 kStackOverflowRuntimeEntry, 0, instruction_->locs()); 2660 kStackOverflowRuntimeEntry, 0, instruction_->locs());
2651 2661
2652 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) { 2662 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) {
2653 // In unoptimized code, record loop stack checks as possible OSR entries. 2663 // In unoptimized code, record loop stack checks as possible OSR entries.
2654 compiler->AddCurrentDescriptor(RawPcDescriptors::kOsrEntry, 2664 compiler->AddCurrentDescriptor(RawPcDescriptors::kOsrEntry,
2655 instruction_->deopt_id(), 2665 instruction_->deopt_id(),
2656 TokenPosition::kNoSource); 2666 TokenPosition::kNoSource);
2657 } 2667 }
2658 compiler->pending_deoptimization_env_ = NULL; 2668 compiler->pending_deoptimization_env_ = NULL;
2659 compiler->RestoreLiveRegisters(instruction_->locs()); 2669 compiler->RestoreLiveRegisters(instruction_->locs());
2660 __ jmp(exit_label()); 2670 __ jmp(exit_label());
2671 compiler->SpecialStatsEnd(
2672 CombinedCodeStatistics::kTagCheckStackOverflowSlowPath);
2661 } 2673 }
2662 2674
2663 2675
2664 Label* osr_entry_label() { 2676 Label* osr_entry_label() {
2665 ASSERT(FLAG_use_osr); 2677 ASSERT(FLAG_use_osr);
2666 return &osr_entry_label_; 2678 return &osr_entry_label_;
2667 } 2679 }
2668 2680
2669 private: 2681 private:
2670 CheckStackOverflowInstr* instruction_; 2682 CheckStackOverflowInstr* instruction_;
(...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after
2811 } 2823 }
2812 } 2824 }
2813 2825
2814 2826
2815 class CheckedSmiSlowPath : public SlowPathCode { 2827 class CheckedSmiSlowPath : public SlowPathCode {
2816 public: 2828 public:
2817 CheckedSmiSlowPath(CheckedSmiOpInstr* instruction, intptr_t try_index) 2829 CheckedSmiSlowPath(CheckedSmiOpInstr* instruction, intptr_t try_index)
2818 : instruction_(instruction), try_index_(try_index) {} 2830 : instruction_(instruction), try_index_(try_index) {}
2819 2831
2820 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2832 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2833 compiler->SpecialStatsBegin(CombinedCodeStatistics::kTagCheckedSmiSlowPath);
2821 if (Assembler::EmittingComments()) { 2834 if (Assembler::EmittingComments()) {
2822 __ Comment("slow path smi operation"); 2835 __ Comment("slow path smi operation");
2823 } 2836 }
2824 __ Bind(entry_label()); 2837 __ Bind(entry_label());
2825 LocationSummary* locs = instruction_->locs(); 2838 LocationSummary* locs = instruction_->locs();
2826 Register result = locs->out(0).reg(); 2839 Register result = locs->out(0).reg();
2827 locs->live_registers()->Remove(Location::RegisterLocation(result)); 2840 locs->live_registers()->Remove(Location::RegisterLocation(result));
2828 2841
2829 compiler->SaveLiveRegisters(locs); 2842 compiler->SaveLiveRegisters(locs);
2830 __ pushq(locs->in(0).reg()); 2843 __ pushq(locs->in(0).reg());
2831 __ pushq(locs->in(1).reg()); 2844 __ pushq(locs->in(1).reg());
2832 compiler->EmitMegamorphicInstanceCall( 2845 compiler->EmitMegamorphicInstanceCall(
2833 *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(), 2846 *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(),
2834 instruction_->call()->deopt_id(), instruction_->call()->token_pos(), 2847 instruction_->call()->deopt_id(), instruction_->call()->token_pos(),
2835 locs, try_index_, 2848 locs, try_index_,
2836 /* slow_path_argument_count = */ 2); 2849 /* slow_path_argument_count = */ 2);
2837 __ MoveRegister(result, RAX); 2850 __ MoveRegister(result, RAX);
2838 compiler->RestoreLiveRegisters(locs); 2851 compiler->RestoreLiveRegisters(locs);
2839 __ jmp(exit_label()); 2852 __ jmp(exit_label());
2853 compiler->SpecialStatsEnd(CombinedCodeStatistics::kTagCheckedSmiSlowPath);
2840 } 2854 }
2841 2855
2842 private: 2856 private:
2843 CheckedSmiOpInstr* instruction_; 2857 CheckedSmiOpInstr* instruction_;
2844 intptr_t try_index_; 2858 intptr_t try_index_;
2845 }; 2859 };
2846 2860
2847 2861
2848 LocationSummary* CheckedSmiOpInstr::MakeLocationSummary(Zone* zone, 2862 LocationSummary* CheckedSmiOpInstr::MakeLocationSummary(Zone* zone,
2849 bool opt) const { 2863 bool opt) const {
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
2971 CheckedSmiComparisonSlowPath(CheckedSmiComparisonInstr* instruction, 2985 CheckedSmiComparisonSlowPath(CheckedSmiComparisonInstr* instruction,
2972 intptr_t try_index, 2986 intptr_t try_index,
2973 BranchLabels labels, 2987 BranchLabels labels,
2974 bool merged = false) 2988 bool merged = false)
2975 : instruction_(instruction), 2989 : instruction_(instruction),
2976 try_index_(try_index), 2990 try_index_(try_index),
2977 labels_(labels), 2991 labels_(labels),
2978 merged_(merged) {} 2992 merged_(merged) {}
2979 2993
2980 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2994 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
 2995 compiler->SpecialStatsBegin(CombinedCodeStatistics::kTagCheckedSmiCmpSlowPath);
2981 if (Assembler::EmittingComments()) { 2996 if (Assembler::EmittingComments()) {
2982 __ Comment("slow path smi comparison"); 2997 __ Comment("slow path smi comparison");
2983 } 2998 }
2984 __ Bind(entry_label()); 2999 __ Bind(entry_label());
2985 LocationSummary* locs = instruction_->locs(); 3000 LocationSummary* locs = instruction_->locs();
2986 Register result = merged_ ? locs->temp(0).reg() : locs->out(0).reg(); 3001 Register result = merged_ ? locs->temp(0).reg() : locs->out(0).reg();
2987 locs->live_registers()->Remove(Location::RegisterLocation(result)); 3002 locs->live_registers()->Remove(Location::RegisterLocation(result));
2988 3003
2989 compiler->SaveLiveRegisters(locs); 3004 compiler->SaveLiveRegisters(locs);
2990 __ pushq(locs->in(0).reg()); 3005 __ pushq(locs->in(0).reg());
2991 __ pushq(locs->in(1).reg()); 3006 __ pushq(locs->in(1).reg());
2992 compiler->EmitMegamorphicInstanceCall( 3007 compiler->EmitMegamorphicInstanceCall(
2993 *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(), 3008 *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(),
2994 instruction_->call()->deopt_id(), instruction_->call()->token_pos(), 3009 instruction_->call()->deopt_id(), instruction_->call()->token_pos(),
2995 locs, try_index_, 3010 locs, try_index_,
2996 /* slow_path_argument_count = */ 2); 3011 /* slow_path_argument_count = */ 2);
2997 __ MoveRegister(result, RAX); 3012 __ MoveRegister(result, RAX);
2998 compiler->RestoreLiveRegisters(locs); 3013 compiler->RestoreLiveRegisters(locs);
2999 if (merged_) { 3014 if (merged_) {
3000 __ CompareObject(result, Bool::True()); 3015 __ CompareObject(result, Bool::True());
3001 __ j(EQUAL, instruction_->is_negated() ? labels_.false_label 3016 __ j(EQUAL, instruction_->is_negated() ? labels_.false_label
3002 : labels_.true_label); 3017 : labels_.true_label);
3003 __ jmp(instruction_->is_negated() ? labels_.true_label 3018 __ jmp(instruction_->is_negated() ? labels_.true_label
3004 : labels_.false_label); 3019 : labels_.false_label);
3005 } else { 3020 } else {
3006 __ jmp(exit_label()); 3021 __ jmp(exit_label());
3007 } 3022 }
 3023 compiler->SpecialStatsEnd(CombinedCodeStatistics::kTagCheckedSmiCmpSlowPath);
3008 } 3024 }
3009 3025
3010 private: 3026 private:
3011 CheckedSmiComparisonInstr* instruction_; 3027 CheckedSmiComparisonInstr* instruction_;
3012 intptr_t try_index_; 3028 intptr_t try_index_;
3013 BranchLabels labels_; 3029 BranchLabels labels_;
3014 bool merged_; 3030 bool merged_;
3015 }; 3031 };
3016 3032
3017 3033
(...skipping 3790 matching lines...) Expand 10 before | Expand all | Expand 10 after
6808 __ Drop(1); 6824 __ Drop(1);
6809 __ popq(result); 6825 __ popq(result);
6810 } 6826 }
6811 6827
6812 6828
6813 } // namespace dart 6829 } // namespace dart
6814 6830
6815 #undef __ 6831 #undef __
6816 6832
6817 #endif // defined TARGET_ARCH_X64 6833 #endif // defined TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « runtime/vm/intermediate_language_arm.cc ('k') | runtime/vm/object.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698