Chromium Code Reviews

Unified Diff: runtime/vm/intermediate_language_arm.cc

Issue 2584613002: PATCH (not to be committed): Support for printing instruction statistics
Patch Set: Fixed polymorphic call inside try, added more tags for remaining unknown code. Created 4 years ago.
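
The patch instruments each slow-path class by bracketing its EmitNativeCode body with compiler->SpecialStatsBegin(tag) and compiler->SpecialStatsEnd(tag), where the tag is a per-slow-path constant from CombinedCodeStatistics (e.g. kTagBoxAllocationSlowPath). Everything emitted between the two calls is attributed to that tag when instruction statistics are printed. The snippet below is a minimal, self-contained sketch of that bracketing pattern; StatsCollector, Tag, and Emit are stand-ins invented for illustration, not the real VM classes.

// Minimal sketch of the Begin/End tagging pattern used throughout this patch.
// The real FlowGraphCompiler and CombinedCodeStatistics live in the Dart VM;
// the types below are hypothetical stand-ins that only show how code emitted
// between SpecialStatsBegin and SpecialStatsEnd is attributed to one tag.
#include <cstdio>

enum Tag {  // stand-in for the CombinedCodeStatistics::kTag... constants
  kTagBoxAllocationSlowPath,
  kTagCheckStackOverflowSlowPath,
  kNumTags
};

class StatsCollector {  // stand-in for the compiler-side statistics support
 public:
  void SpecialStatsBegin(Tag tag) { start_[tag] = cursor_; }
  void SpecialStatsEnd(Tag tag) { bytes_[tag] += cursor_ - start_[tag]; }
  void Emit(int instruction_bytes) { cursor_ += instruction_bytes; }
  void Print() const {
    for (int t = 0; t < kNumTags; t++) {
      std::printf("tag %d: %d bytes\n", t, bytes_[t]);
    }
  }

 private:
  int cursor_ = 0;  // models the assembler offset while emitting code
  int start_[kNumTags] = {};
  int bytes_[kNumTags] = {};
};

// Mirrors the shape of BoxAllocationSlowPath::EmitNativeCode in the diff:
// the Begin/End pair brackets every instruction the slow path emits.
void EmitBoxAllocationSlowPath(StatsCollector* compiler) {
  compiler->SpecialStatsBegin(kTagBoxAllocationSlowPath);
  compiler->Emit(4);  // e.g. the Bind()/label and stub-call sequence
  compiler->Emit(8);
  compiler->SpecialStatsEnd(kTagBoxAllocationSlowPath);
}

int main() {
  StatsCollector compiler;
  EmitBoxAllocationSlowPath(&compiler);
  compiler.Print();  // tag 0 accounts for the 12 emitted bytes
  return 0;
}
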
 
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM.
 #if defined(TARGET_ARCH_ARM)

 #include "vm/intermediate_language.h"

(...skipping 1916 matching lines...)


 class BoxAllocationSlowPath : public SlowPathCode {
  public:
   BoxAllocationSlowPath(Instruction* instruction,
                         const Class& cls,
                         Register result)
       : instruction_(instruction), cls_(cls), result_(result) {}

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    compiler->SpecialStatsBegin(
+        CombinedCodeStatistics::kTagBoxAllocationSlowPath);
     if (Assembler::EmittingComments()) {
       __ Comment("%s slow path allocation of %s", instruction_->DebugName(),
                  String::Handle(cls_.ScrubbedName()).ToCString());
     }
     __ Bind(entry_label());
     const Code& stub = Code::ZoneHandle(
         compiler->zone(), StubCode::GetAllocationStubForClass(cls_));
     const StubEntry stub_entry(stub);

     LocationSummary* locs = instruction_->locs();

     locs->live_registers()->Remove(Location::RegisterLocation(result_));

     compiler->SaveLiveRegisters(locs);
     compiler->GenerateCall(TokenPosition::kNoSource,  // No token position.
                            stub_entry, RawPcDescriptors::kOther, locs);
     compiler->AddStubCallTarget(stub);
     __ MoveRegister(result_, R0);
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
+    compiler->SpecialStatsEnd(
+        CombinedCodeStatistics::kTagBoxAllocationSlowPath);
   }

   static void Allocate(FlowGraphCompiler* compiler,
                        Instruction* instruction,
                        const Class& cls,
                        Register result,
                        Register temp) {
     if (compiler->intrinsic_mode()) {
       __ TryAllocate(cls, compiler->intrinsic_slow_path_label(), result, temp);
     } else {
(...skipping 760 matching lines...)
 }


 class AllocateContextSlowPath : public SlowPathCode {
  public:
   explicit AllocateContextSlowPath(
       AllocateUninitializedContextInstr* instruction)
       : instruction_(instruction) {}

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    compiler->SpecialStatsBegin(
+        CombinedCodeStatistics::kTagAllocateContextSlowPath);
     __ Comment("AllocateContextSlowPath");
     __ Bind(entry_label());

     LocationSummary* locs = instruction_->locs();
     locs->live_registers()->Remove(locs->out(0));

     compiler->SaveLiveRegisters(locs);

     __ LoadImmediate(R1, instruction_->num_context_variables());
     const Code& stub = Code::ZoneHandle(
         compiler->zone(), StubCode::AllocateContext_entry()->code());
     compiler->AddStubCallTarget(stub);
     compiler->GenerateCall(instruction_->token_pos(),
                            *StubCode::AllocateContext_entry(),
                            RawPcDescriptors::kOther, locs);
     ASSERT(instruction_->locs()->out(0).reg() == R0);
     compiler->RestoreLiveRegisters(instruction_->locs());
     __ b(exit_label());
+    compiler->SpecialStatsEnd(
+        CombinedCodeStatistics::kTagAllocateContextSlowPath);
   }

  private:
   AllocateUninitializedContextInstr* instruction_;
 };


 void AllocateUninitializedContextInstr::EmitNativeCode(
     FlowGraphCompiler* compiler) {
   Register temp0 = locs()->temp(0).reg();
(...skipping 176 matching lines...)
   return summary;
 }


 class CheckStackOverflowSlowPath : public SlowPathCode {
  public:
   explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
       : instruction_(instruction) {}

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    compiler->SpecialStatsBegin(
+        CombinedCodeStatistics::kTagCheckStackOverflowSlowPath);
     if (FLAG_use_osr && osr_entry_label()->IsLinked()) {
       const Register value = instruction_->locs()->temp(0).reg();
       __ Comment("CheckStackOverflowSlowPathOsr");
       __ Bind(osr_entry_label());
       __ LoadImmediate(value, Thread::kOsrRequest);
       __ str(value, Address(THR, Thread::stack_overflow_flags_offset()));
     }
     __ Comment("CheckStackOverflowSlowPath");
     __ Bind(entry_label());
     compiler->SaveLiveRegisters(instruction_->locs());
     // pending_deoptimization_env_ is needed to generate a runtime call that
     // may throw an exception.
     ASSERT(compiler->pending_deoptimization_env_ == NULL);
     Environment* env = compiler->SlowPathEnvironmentFor(instruction_);
     compiler->pending_deoptimization_env_ = env;
     compiler->GenerateRuntimeCall(
         instruction_->token_pos(), instruction_->deopt_id(),
         kStackOverflowRuntimeEntry, 0, instruction_->locs());

     if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) {
       // In unoptimized code, record loop stack checks as possible OSR entries.
       compiler->AddCurrentDescriptor(RawPcDescriptors::kOsrEntry,
                                      instruction_->deopt_id(),
                                      TokenPosition::kNoSource);
     }
     compiler->pending_deoptimization_env_ = NULL;
     compiler->RestoreLiveRegisters(instruction_->locs());
     __ b(exit_label());
+    compiler->SpecialStatsEnd(
+        CombinedCodeStatistics::kTagCheckStackOverflowSlowPath);
   }

   Label* osr_entry_label() {
     ASSERT(FLAG_use_osr);
     return &osr_entry_label_;
   }

  private:
   CheckStackOverflowInstr* instruction_;
   Label osr_entry_label_;
(...skipping 120 matching lines...)
   }
 }


 class CheckedSmiSlowPath : public SlowPathCode {
  public:
   CheckedSmiSlowPath(CheckedSmiOpInstr* instruction, intptr_t try_index)
       : instruction_(instruction), try_index_(try_index) {}

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    compiler->SpecialStatsBegin(CombinedCodeStatistics::kTagCheckedSmiSlowPath);
     if (Assembler::EmittingComments()) {
       __ Comment("slow path smi operation");
     }
     __ Bind(entry_label());
     LocationSummary* locs = instruction_->locs();
     Register result = locs->out(0).reg();
     locs->live_registers()->Remove(Location::RegisterLocation(result));

     compiler->SaveLiveRegisters(locs);
     __ Push(locs->in(0).reg());
     __ Push(locs->in(1).reg());
     compiler->EmitMegamorphicInstanceCall(
         *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(),
         instruction_->call()->deopt_id(), instruction_->call()->token_pos(),
         locs, try_index_,
         /* slow_path_argument_count = */ 2);
     __ mov(result, Operand(R0));
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
+    compiler->SpecialStatsEnd(CombinedCodeStatistics::kTagCheckedSmiSlowPath);
   }

  private:
   CheckedSmiOpInstr* instruction_;
   intptr_t try_index_;
 };


 LocationSummary* CheckedSmiOpInstr::MakeLocationSummary(Zone* zone,
                                                         bool opt) const {
(...skipping 95 matching lines...)
   CheckedSmiComparisonSlowPath(CheckedSmiComparisonInstr* instruction,
                                intptr_t try_index,
                                BranchLabels labels,
                                bool merged)
       : instruction_(instruction),
         try_index_(try_index),
         labels_(labels),
         merged_(merged) {}

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    compiler->SpecialStatsBegin(CombinedCodeStatistics::kTagCheckedSmiCmpSlowPath);
     if (Assembler::EmittingComments()) {
       __ Comment("slow path smi operation");
     }
     __ Bind(entry_label());
     LocationSummary* locs = instruction_->locs();
     Register result = merged_ ? locs->temp(0).reg() : locs->out(0).reg();
     locs->live_registers()->Remove(Location::RegisterLocation(result));

     compiler->SaveLiveRegisters(locs);
     __ Push(locs->in(0).reg());
     __ Push(locs->in(1).reg());
     compiler->EmitMegamorphicInstanceCall(
         *instruction_->call()->ic_data(), instruction_->call()->ArgumentCount(),
         instruction_->call()->deopt_id(), instruction_->call()->token_pos(),
         locs, try_index_,
         /* slow_path_argument_count = */ 2);
     __ mov(result, Operand(R0));
     compiler->RestoreLiveRegisters(locs);
     if (merged_) {
       __ CompareObject(result, Bool::True());
       __ b(
           instruction_->is_negated() ? labels_.false_label : labels_.true_label,
           EQ);
       __ b(instruction_->is_negated() ? labels_.true_label
                                       : labels_.false_label);
     } else {
       __ b(exit_label());
     }
+    compiler->SpecialStatsEnd(CombinedCodeStatistics::kTagCheckedSmiCmpSlowPath);
   }

  private:
   CheckedSmiComparisonInstr* instruction_;
   intptr_t try_index_;
   BranchLabels labels_;
   bool merged_;
 };


(...skipping 3936 matching lines...)
   compiler->GenerateRuntimeCall(TokenPosition::kNoSource, deopt_id(),
                                 kGrowRegExpStackRuntimeEntry, 1, locs());
   __ Drop(1);
   __ Pop(result);
 }


 }  // namespace dart

 #endif  // defined TARGET_ARCH_ARM
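
Not part of the patch above, but a possible illustration of the same idea: because SpecialStatsBegin and SpecialStatsEnd run at code-emission time, the pairing could also be expressed as a C++ scope guard so the two calls cannot drift apart as slow paths are edited. The sketch below is hypothetical; FlowGraphCompilerLike and the tag enum are stand-ins for the VM's FlowGraphCompiler and CombinedCodeStatistics, and only the Begin/End call names mirror what the diff uses.

// Hypothetical alternative (not in this patch): wrap the Begin/End pair in a
// scope guard so SpecialStatsEnd is issued automatically when EmitNativeCode
// returns.
#include <cstdio>

enum Tag { kTagBoxAllocationSlowPath };

class FlowGraphCompilerLike {  // stand-in; prints instead of recording stats
 public:
  void SpecialStatsBegin(Tag tag) { std::printf("begin tag %d\n", tag); }
  void SpecialStatsEnd(Tag tag) { std::printf("end tag %d\n", tag); }
};

class SpecialStatsScope {  // RAII: End is paired with Begin by construction
 public:
  SpecialStatsScope(FlowGraphCompilerLike* compiler, Tag tag)
      : compiler_(compiler), tag_(tag) {
    compiler_->SpecialStatsBegin(tag_);
  }
  ~SpecialStatsScope() { compiler_->SpecialStatsEnd(tag_); }

 private:
  FlowGraphCompilerLike* const compiler_;
  const Tag tag_;
};

void EmitNativeCode(FlowGraphCompilerLike* compiler) {
  SpecialStatsScope stats(compiler, kTagBoxAllocationSlowPath);
  // ... emit the slow-path body here ...
}  // SpecialStatsEnd runs here, on every return path

int main() {
  FlowGraphCompilerLike compiler;
  EmitNativeCode(&compiler);
  return 0;
}
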
