Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(113)

Side by Side Diff: runtime/vm/intermediate_language.cc

Issue 1858283002: Initial SIMDBC interpreter. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: address comments Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/intermediate_language.h" 5 #include "vm/intermediate_language.h"
6 6
7 #include "vm/bit_vector.h" 7 #include "vm/bit_vector.h"
8 #include "vm/bootstrap.h" 8 #include "vm/bootstrap.h"
9 #include "vm/compiler.h" 9 #include "vm/compiler.h"
10 #include "vm/constant_propagator.h" 10 #include "vm/constant_propagator.h"
(...skipping 15 matching lines...) Expand all
26 #include "vm/symbols.h" 26 #include "vm/symbols.h"
27 27
28 #include "vm/il_printer.h" 28 #include "vm/il_printer.h"
29 29
30 namespace dart { 30 namespace dart {
31 31
32 DEFINE_FLAG(bool, propagate_ic_data, true, 32 DEFINE_FLAG(bool, propagate_ic_data, true,
33 "Propagate IC data from unoptimized to optimized IC calls."); 33 "Propagate IC data from unoptimized to optimized IC calls.");
34 DEFINE_FLAG(bool, two_args_smi_icd, true, 34 DEFINE_FLAG(bool, two_args_smi_icd, true,
35 "Generate special IC stubs for two args Smi operations"); 35 "Generate special IC stubs for two args Smi operations");
36 DEFINE_FLAG(bool, unbox_numeric_fields, true, 36 DEFINE_FLAG(bool, unbox_numeric_fields, !USING_DBC,
37 "Support unboxed double and float32x4 fields."); 37 "Support unboxed double and float32x4 fields.");
38 DECLARE_FLAG(bool, eliminate_type_checks); 38 DECLARE_FLAG(bool, eliminate_type_checks);
39 39
40 40
41 #if defined(DEBUG) 41 #if defined(DEBUG)
42 void Instruction::CheckField(const Field& field) const { 42 void Instruction::CheckField(const Field& field) const {
43 ASSERT(field.IsZoneHandle()); 43 ASSERT(field.IsZoneHandle());
44 ASSERT(!Compiler::IsBackgroundCompilation() || !field.IsOriginal()); 44 ASSERT(!Compiler::IsBackgroundCompilation() || !field.IsOriginal());
45 } 45 }
46 #endif // DEBUG 46 #endif // DEBUG
(...skipping 2701 matching lines...) Expand 10 before | Expand all | Expand 10 after
2748 LocationSummary* TargetEntryInstr::MakeLocationSummary(Zone* zone, 2748 LocationSummary* TargetEntryInstr::MakeLocationSummary(Zone* zone,
2749 bool optimizing) const { 2749 bool optimizing) const {
2750 UNREACHABLE(); 2750 UNREACHABLE();
2751 return NULL; 2751 return NULL;
2752 } 2752 }
2753 2753
2754 2754
2755 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2755 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2756 __ Bind(compiler->GetJumpLabel(this)); 2756 __ Bind(compiler->GetJumpLabel(this));
2757 if (!compiler->is_optimizing()) { 2757 if (!compiler->is_optimizing()) {
2758 #if !defined(TARGET_ARCH_DBC)
2759 // TODO(vegorov) re-enable edge counters on DBC if we consider them
2760 // beneficial for the quality of the optimized bytecode.
2758 if (compiler->NeedsEdgeCounter(this)) { 2761 if (compiler->NeedsEdgeCounter(this)) {
2759 compiler->EmitEdgeCounter(preorder_number()); 2762 compiler->EmitEdgeCounter(preorder_number());
2760 } 2763 }
2764 #endif
2765
2761 // The deoptimization descriptor points after the edge counter code for 2766 // The deoptimization descriptor points after the edge counter code for
2762 // uniformity with ARM and MIPS, where we can reuse pattern matching 2767 // uniformity with ARM and MIPS, where we can reuse pattern matching
2763 // code that matches backwards from the end of the pattern. 2768 // code that matches backwards from the end of the pattern.
2764 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, 2769 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
2765 GetDeoptId(), 2770 GetDeoptId(),
2766 TokenPosition::kNoSource); 2771 TokenPosition::kNoSource);
2767 } 2772 }
2768 if (HasParallelMove()) { 2773 if (HasParallelMove()) {
2769 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); 2774 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
2770 } 2775 }
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after
2950 Location::SameAsFirstInput(), 2955 Location::SameAsFirstInput(),
2951 LocationSummary::kNoCall) 2956 LocationSummary::kNoCall)
2952 : LocationSummary::Make(zone, 2957 : LocationSummary::Make(zone,
2953 0, 2958 0,
2954 Location::NoLocation(), 2959 Location::NoLocation(),
2955 LocationSummary::kNoCall); 2960 LocationSummary::kNoCall);
2956 } 2961 }
2957 2962
2958 2963
2959 void DropTempsInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2964 void DropTempsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2965 #if defined(TARGET_ARCH_DBC)
2966 // On DBC the action of popping the TOS value and then pushing it
2967 // after all intermediates are popped is folded into a special
2968 // bytecode (DropR). On other architectures this is handled by
2969 // instruction prologue/epilogues.
2970 ASSERT(!compiler->is_optimizing());
2971 if (InputCount() != 0 && HasTemp()) {
zra 2016/04/14 18:27:48 (InputCount() != 0)
Vyacheslav Egorov (Google) 2016/04/18 15:56:41 Done.
2972 __ DropR(num_temps());
2973 } else {
2974 __ Drop(num_temps() + ((InputCount() != 0) ? 1 : 0));
2975 }
2976 #else
2960 ASSERT(!compiler->is_optimizing()); 2977 ASSERT(!compiler->is_optimizing());
2961 // Assert that register assignment is correct. 2978 // Assert that register assignment is correct.
2962 ASSERT((InputCount() == 0) || (locs()->out(0).reg() == locs()->in(0).reg())); 2979 ASSERT((InputCount() == 0) || (locs()->out(0).reg() == locs()->in(0).reg()));
2963 __ Drop(num_temps()); 2980 __ Drop(num_temps());
2981 #endif // defined(TARGET_ARCH_DBC)
2964 } 2982 }
2965 2983
2966 2984
2967 StrictCompareInstr::StrictCompareInstr(TokenPosition token_pos, 2985 StrictCompareInstr::StrictCompareInstr(TokenPosition token_pos,
2968 Token::Kind kind, 2986 Token::Kind kind,
2969 Value* left, 2987 Value* left,
2970 Value* right, 2988 Value* right,
2971 bool needs_number_check) 2989 bool needs_number_check)
2972 : ComparisonInstr(token_pos, 2990 : ComparisonInstr(token_pos,
2973 kind, 2991 kind,
2974 left, 2992 left,
2975 right, 2993 right,
2976 Thread::Current()->GetNextDeoptId()), 2994 Thread::Current()->GetNextDeoptId()),
2977 needs_number_check_(needs_number_check) { 2995 needs_number_check_(needs_number_check) {
2978 ASSERT((kind == Token::kEQ_STRICT) || (kind == Token::kNE_STRICT)); 2996 ASSERT((kind == Token::kEQ_STRICT) || (kind == Token::kNE_STRICT));
2979 } 2997 }
2980 2998
2981 2999
2982 LocationSummary* InstanceCallInstr::MakeLocationSummary(Zone* zone, 3000 LocationSummary* InstanceCallInstr::MakeLocationSummary(Zone* zone,
2983 bool optimizing) const { 3001 bool optimizing) const {
2984 return MakeCallSummary(zone); 3002 return MakeCallSummary(zone);
2985 } 3003 }
2986 3004
2987 3005
3006 // DBC does not use specialized inline cache stubs for smi operations.
3007 #if !defined(TARGET_ARCH_DBC)
2988 static const StubEntry* TwoArgsSmiOpInlineCacheEntry(Token::Kind kind) { 3008 static const StubEntry* TwoArgsSmiOpInlineCacheEntry(Token::Kind kind) {
2989 if (!FLAG_two_args_smi_icd) { 3009 if (!FLAG_two_args_smi_icd) {
2990 return 0; 3010 return 0;
2991 } 3011 }
2992 switch (kind) { 3012 switch (kind) {
2993 case Token::kADD: return StubCode::SmiAddInlineCache_entry(); 3013 case Token::kADD: return StubCode::SmiAddInlineCache_entry();
2994 case Token::kSUB: return StubCode::SmiSubInlineCache_entry(); 3014 case Token::kSUB: return StubCode::SmiSubInlineCache_entry();
2995 case Token::kEQ: return StubCode::SmiEqualInlineCache_entry(); 3015 case Token::kEQ: return StubCode::SmiEqualInlineCache_entry();
2996 default: return NULL; 3016 default: return NULL;
2997 } 3017 }
2998 } 3018 }
3019 #endif
2999 3020
3000 3021
3001 void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3022 void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3002 Zone* zone = compiler->zone(); 3023 Zone* zone = compiler->zone();
3003 const ICData* call_ic_data = NULL; 3024 const ICData* call_ic_data = NULL;
3004 if (!FLAG_propagate_ic_data || !compiler->is_optimizing() || 3025 if (!FLAG_propagate_ic_data || !compiler->is_optimizing() ||
3005 (ic_data() == NULL)) { 3026 (ic_data() == NULL)) {
3006 const Array& arguments_descriptor = 3027 const Array& arguments_descriptor =
3007 Array::Handle(zone, ArgumentsDescriptor::New(ArgumentCount(), 3028 Array::Handle(zone, ArgumentsDescriptor::New(ArgumentCount(),
3008 argument_names())); 3029 argument_names()));
3009 call_ic_data = compiler->GetOrAddInstanceCallICData( 3030 call_ic_data = compiler->GetOrAddInstanceCallICData(
3010 deopt_id(), function_name(), arguments_descriptor, 3031 deopt_id(), function_name(), arguments_descriptor,
3011 checked_argument_count()); 3032 checked_argument_count());
3012 } else { 3033 } else {
3013 call_ic_data = &ICData::ZoneHandle(zone, ic_data()->raw()); 3034 call_ic_data = &ICData::ZoneHandle(zone, ic_data()->raw());
3014 } 3035 }
3036
3037 #if !defined(TARGET_ARCH_DBC)
3015 if (compiler->is_optimizing() && HasICData()) { 3038 if (compiler->is_optimizing() && HasICData()) {
3016 ASSERT(HasICData()); 3039 ASSERT(HasICData());
3017 if (ic_data()->NumberOfUsedChecks() > 0) { 3040 if (ic_data()->NumberOfUsedChecks() > 0) {
3018 const ICData& unary_ic_data = 3041 const ICData& unary_ic_data =
3019 ICData::ZoneHandle(zone, ic_data()->AsUnaryClassChecks()); 3042 ICData::ZoneHandle(zone, ic_data()->AsUnaryClassChecks());
3020 compiler->GenerateInstanceCall(deopt_id(), 3043 compiler->GenerateInstanceCall(deopt_id(),
3021 token_pos(), 3044 token_pos(),
3022 ArgumentCount(), 3045 ArgumentCount(),
3023 locs(), 3046 locs(),
3024 unary_ic_data); 3047 unary_ic_data);
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
3078 compiler->EmitInstanceCall(*stub_entry, *call_ic_data, ArgumentCount(), 3101 compiler->EmitInstanceCall(*stub_entry, *call_ic_data, ArgumentCount(),
3079 deopt_id(), token_pos(), locs()); 3102 deopt_id(), token_pos(), locs());
3080 } else { 3103 } else {
3081 compiler->GenerateInstanceCall(deopt_id(), 3104 compiler->GenerateInstanceCall(deopt_id(),
3082 token_pos(), 3105 token_pos(),
3083 ArgumentCount(), 3106 ArgumentCount(),
3084 locs(), 3107 locs(),
3085 *call_ic_data); 3108 *call_ic_data);
3086 } 3109 }
3087 } 3110 }
3111 #else
3112 // Emit smi fast path instruction. If fast-path succeeds it skips the next
3113 // instruction otherwise it falls through.
zra 2016/04/14 18:27:48 falls
Vyacheslav Egorov (Google) 2016/04/18 15:56:41 Done.
3114 if (function_name().raw() == Symbols::Plus().raw()) {
3115 __ AddTOS();
3116 } else if (function_name().raw() == Symbols::EqualOperator().raw()) {
3117 __ EqualTOS();
3118 } else if (function_name().raw() == Symbols::LAngleBracket().raw()) {
3119 __ LessThanTOS();
3120 } else if (function_name().raw() == Symbols::RAngleBracket().raw()) {
3121 __ GreaterThanTOS();
3122 } else if (function_name().raw() == Symbols::BitAnd().raw()) {
3123 __ BitAndTOS();
3124 } else if (function_name().raw() == Symbols::BitOr().raw()) {
3125 __ BitOrTOS();
3126 } else if (function_name().raw() == Symbols::Star().raw()) {
3127 __ MulTOS();
3128 }
3129
3130 const intptr_t call_ic_data_kidx = __ AddConstant(*call_ic_data);
3131 switch (call_ic_data->NumArgsTested()) {
3132 case 1:
3133 __ InstanceCall(ArgumentCount(), call_ic_data_kidx);
3134 break;
3135 case 2:
3136 __ InstanceCall2(ArgumentCount(), call_ic_data_kidx);
3137 break;
3138 case 3:
3139 __ InstanceCall3(ArgumentCount(), call_ic_data_kidx);
3140 break;
3141 default:
3142 UNIMPLEMENTED();
3143 break;
3144 }
3145 compiler->AddCurrentDescriptor(RawPcDescriptors::kIcCall,
3146 deopt_id(),
3147 token_pos());
3148 #endif // !defined(TARGET_ARCH_DBC)
3088 } 3149 }
3089 3150
3090 3151
3091 bool PolymorphicInstanceCallInstr::HasSingleRecognizedTarget() const { 3152 bool PolymorphicInstanceCallInstr::HasSingleRecognizedTarget() const {
3092 return ic_data().HasOneTarget() && 3153 return ic_data().HasOneTarget() &&
3093 (MethodRecognizer::RecognizeKind( 3154 (MethodRecognizer::RecognizeKind(
3094 Function::Handle(ic_data().GetTargetAt(0))) != 3155 Function::Handle(ic_data().GetTargetAt(0))) !=
3095 MethodRecognizer::kUnknown); 3156 MethodRecognizer::kUnknown);
3096 } 3157 }
3097 3158
3098 3159
3099 bool PolymorphicInstanceCallInstr::HasOnlyDispatcherTargets() const { 3160 bool PolymorphicInstanceCallInstr::HasOnlyDispatcherTargets() const {
3100 for (intptr_t i = 0; i < ic_data().NumberOfChecks(); ++i) { 3161 for (intptr_t i = 0; i < ic_data().NumberOfChecks(); ++i) {
3101 const Function& target = Function::Handle(ic_data().GetTargetAt(i)); 3162 const Function& target = Function::Handle(ic_data().GetTargetAt(i));
3102 if (!target.IsNoSuchMethodDispatcher() && 3163 if (!target.IsNoSuchMethodDispatcher() &&
3103 !target.IsInvokeFieldDispatcher()) { 3164 !target.IsInvokeFieldDispatcher()) {
3104 return false; 3165 return false;
3105 } 3166 }
3106 } 3167 }
3107 return true; 3168 return true;
3108 } 3169 }
3109 3170
3171
3172 // DBC does not support optimizing compiler and thus doesn't emit
3173 // PolymorphicInstanceCallInstr.
3174 #if !defined(TARGET_ARCH_DBC)
3110 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3175 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3111 ASSERT(ic_data().NumArgsTested() == 1); 3176 ASSERT(ic_data().NumArgsTested() == 1);
3112 if (!with_checks()) { 3177 if (!with_checks()) {
3113 ASSERT(ic_data().HasOneTarget()); 3178 ASSERT(ic_data().HasOneTarget());
3114 const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0)); 3179 const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0));
3115 compiler->GenerateStaticCall(deopt_id(), 3180 compiler->GenerateStaticCall(deopt_id(),
3116 instance_call()->token_pos(), 3181 instance_call()->token_pos(),
3117 target, 3182 target,
3118 instance_call()->ArgumentCount(), 3183 instance_call()->ArgumentCount(),
3119 instance_call()->argument_names(), 3184 instance_call()->argument_names(),
3120 locs(), 3185 locs(),
3121 ICData::Handle()); 3186 ICData::Handle());
3122 return; 3187 return;
3123 } 3188 }
3124 3189
3125 compiler->EmitPolymorphicInstanceCall(ic_data(), 3190 compiler->EmitPolymorphicInstanceCall(ic_data(),
3126 instance_call()->ArgumentCount(), 3191 instance_call()->ArgumentCount(),
3127 instance_call()->argument_names(), 3192 instance_call()->argument_names(),
3128 deopt_id(), 3193 deopt_id(),
3129 instance_call()->token_pos(), 3194 instance_call()->token_pos(),
3130 locs()); 3195 locs());
3131 } 3196 }
3197 #endif
3132 3198
3133 3199
3134 LocationSummary* StaticCallInstr::MakeLocationSummary(Zone* zone, 3200 LocationSummary* StaticCallInstr::MakeLocationSummary(Zone* zone,
3135 bool optimizing) const { 3201 bool optimizing) const {
3136 return MakeCallSummary(zone); 3202 return MakeCallSummary(zone);
3137 } 3203 }
3138 3204
3139 3205
3140 void StaticCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3206 void StaticCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3207 #if !defined(TARGET_ARCH_DBC)
3141 const ICData* call_ic_data = NULL; 3208 const ICData* call_ic_data = NULL;
3142 if (!FLAG_propagate_ic_data || !compiler->is_optimizing() || 3209 if (!FLAG_propagate_ic_data || !compiler->is_optimizing() ||
3143 (ic_data() == NULL)) { 3210 (ic_data() == NULL)) {
3144 const Array& arguments_descriptor = 3211 const Array& arguments_descriptor =
3145 Array::Handle(ArgumentsDescriptor::New(ArgumentCount(), 3212 Array::Handle(ArgumentsDescriptor::New(ArgumentCount(),
3146 argument_names())); 3213 argument_names()));
3147 MethodRecognizer::Kind recognized_kind = 3214 MethodRecognizer::Kind recognized_kind =
3148 MethodRecognizer::RecognizeKind(function()); 3215 MethodRecognizer::RecognizeKind(function());
3149 int num_args_checked = 0; 3216 int num_args_checked = 0;
3150 switch (recognized_kind) { 3217 switch (recognized_kind) {
(...skipping 12 matching lines...) Expand all
3163 } else { 3230 } else {
3164 call_ic_data = &ICData::ZoneHandle(ic_data()->raw()); 3231 call_ic_data = &ICData::ZoneHandle(ic_data()->raw());
3165 } 3232 }
3166 compiler->GenerateStaticCall(deopt_id(), 3233 compiler->GenerateStaticCall(deopt_id(),
3167 token_pos(), 3234 token_pos(),
3168 function(), 3235 function(),
3169 ArgumentCount(), 3236 ArgumentCount(),
3170 argument_names(), 3237 argument_names(),
3171 locs(), 3238 locs(),
3172 *call_ic_data); 3239 *call_ic_data);
3240 #else
3241 const Array& arguments_descriptor =
3242 (ic_data() == NULL) ?
3243 Array::Handle(ArgumentsDescriptor::New(ArgumentCount(),
3244 argument_names())) :
3245 Array::Handle(ic_data()->arguments_descriptor());
3246 const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
3247
3248 __ PushConstant(function());
3249 __ StaticCall(ArgumentCount(), argdesc_kidx);
3250 compiler->AddCurrentDescriptor(RawPcDescriptors::kUnoptStaticCall,
3251 deopt_id(),
3252 token_pos());
3253 #endif // !defined(TARGET_ARCH_DBC)
3173 } 3254 }
3174 3255
3175 3256
3176 void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3257 void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3177 compiler->GenerateAssertAssignable(token_pos(), 3258 compiler->GenerateAssertAssignable(token_pos(),
3178 deopt_id(), 3259 deopt_id(),
3179 dst_type(), 3260 dst_type(),
3180 dst_name(), 3261 dst_name(),
3181 locs()); 3262 locs());
3263
3264 // DBC does not use LocationSummaries in the same way as other architectures.
3265 #if !defined(TARGET_ARCH_DBC)
3182 ASSERT(locs()->in(0).reg() == locs()->out(0).reg()); 3266 ASSERT(locs()->in(0).reg() == locs()->out(0).reg());
3267 #endif
3183 } 3268 }
3184 3269
3185 3270
3186 LocationSummary* DeoptimizeInstr::MakeLocationSummary(Zone* zone, 3271 LocationSummary* DeoptimizeInstr::MakeLocationSummary(Zone* zone,
3187 bool opt) const { 3272 bool opt) const {
3188 return new(zone) LocationSummary(zone, 0, 0, LocationSummary::kNoCall); 3273 return new(zone) LocationSummary(zone, 0, 0, LocationSummary::kNoCall);
3189 } 3274 }
3190 3275
3191 3276
3192 void DeoptimizeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3277 void DeoptimizeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
3283 intptr_t use_index = instr->env()->Length(); // Start index after inner. 3368 intptr_t use_index = instr->env()->Length(); // Start index after inner.
3284 for (Environment::DeepIterator it(copy); !it.Done(); it.Advance()) { 3369 for (Environment::DeepIterator it(copy); !it.Done(); it.Advance()) {
3285 Value* value = it.CurrentValue(); 3370 Value* value = it.CurrentValue();
3286 value->set_instruction(instr); 3371 value->set_instruction(instr);
3287 value->set_use_index(use_index++); 3372 value->set_use_index(use_index++);
3288 value->definition()->AddEnvUse(value); 3373 value->definition()->AddEnvUse(value);
3289 } 3374 }
3290 } 3375 }
3291 3376
3292 3377
3293 static bool BindsToSmiConstant(Value* value) {
3294 return value->BindsToConstant() && value->BoundConstant().IsSmi();
3295 }
3296
3297
3298 ComparisonInstr* EqualityCompareInstr::CopyWithNewOperands(Value* new_left, 3378 ComparisonInstr* EqualityCompareInstr::CopyWithNewOperands(Value* new_left,
3299 Value* new_right) { 3379 Value* new_right) {
3300 return new EqualityCompareInstr(token_pos(), 3380 return new EqualityCompareInstr(token_pos(),
3301 kind(), 3381 kind(),
3302 new_left, 3382 new_left,
3303 new_right, 3383 new_right,
3304 operation_cid(), 3384 operation_cid(),
3305 deopt_id()); 3385 deopt_id());
3306 } 3386 }
3307 3387
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
3355 } 3435 }
3356 for (intptr_t i = 0; i < cid_results().length(); i++) { 3436 for (intptr_t i = 0; i < cid_results().length(); i++) {
3357 if (cid_results()[i] != other_instr->cid_results()[i]) { 3437 if (cid_results()[i] != other_instr->cid_results()[i]) {
3358 return false; 3438 return false;
3359 } 3439 }
3360 } 3440 }
3361 return true; 3441 return true;
3362 } 3442 }
3363 3443
3364 3444
3445 #if !defined(TARGET_ARCH_DBC)
3446 static bool BindsToSmiConstant(Value* value) {
3447 return value->BindsToConstant() && value->BoundConstant().IsSmi();
3448 }
3449 #endif
3450
3451
3365 bool IfThenElseInstr::Supports(ComparisonInstr* comparison, 3452 bool IfThenElseInstr::Supports(ComparisonInstr* comparison,
3366 Value* v1, 3453 Value* v1,
3367 Value* v2) { 3454 Value* v2) {
3455 #if !defined(TARGET_ARCH_DBC)
3368 bool is_smi_result = BindsToSmiConstant(v1) && BindsToSmiConstant(v2); 3456 bool is_smi_result = BindsToSmiConstant(v1) && BindsToSmiConstant(v2);
3369 if (comparison->IsStrictCompare()) { 3457 if (comparison->IsStrictCompare()) {
3370 // Strict comparison with number checks calls a stub and is not supported 3458 // Strict comparison with number checks calls a stub and is not supported
3371 // by if-conversion. 3459 // by if-conversion.
3372 return is_smi_result 3460 return is_smi_result
3373 && !comparison->AsStrictCompare()->needs_number_check(); 3461 && !comparison->AsStrictCompare()->needs_number_check();
3374 } 3462 }
3375 if (comparison->operation_cid() != kSmiCid) { 3463 if (comparison->operation_cid() != kSmiCid) {
3376 // Non-smi comparisons are not supported by if-conversion. 3464 // Non-smi comparisons are not supported by if-conversion.
3377 return false; 3465 return false;
3378 } 3466 }
3379 return is_smi_result; 3467 return is_smi_result;
3468 #else
3469 return false;
3470 #endif // !defined(TARGET_ARCH_DBC)
3380 } 3471 }
3381 3472
3382 3473
3383 bool PhiInstr::IsRedundant() const { 3474 bool PhiInstr::IsRedundant() const {
3384 ASSERT(InputCount() > 1); 3475 ASSERT(InputCount() > 1);
3385 Definition* first = InputAt(0)->definition(); 3476 Definition* first = InputAt(0)->definition();
3386 for (intptr_t i = 1; i < InputCount(); ++i) { 3477 for (intptr_t i = 1; i < InputCount(); ++i) {
3387 Definition* def = InputAt(i)->definition(); 3478 Definition* def = InputAt(i)->definition();
3388 if (def != first) return false; 3479 if (def != first) return false;
3389 } 3480 }
(...skipping 344 matching lines...) Expand 10 before | Expand all | Expand 10 after
3734 set_native_c_function(native_function); 3825 set_native_c_function(native_function);
3735 function().SetIsNativeAutoSetupScope(auto_setup_scope); 3826 function().SetIsNativeAutoSetupScope(auto_setup_scope);
3736 Dart_NativeEntryResolver resolver = library.native_entry_resolver(); 3827 Dart_NativeEntryResolver resolver = library.native_entry_resolver();
3737 bool is_bootstrap_native = Bootstrap::IsBootstapResolver(resolver); 3828 bool is_bootstrap_native = Bootstrap::IsBootstapResolver(resolver);
3738 set_is_bootstrap_native(is_bootstrap_native); 3829 set_is_bootstrap_native(is_bootstrap_native);
3739 } 3830 }
3740 3831
3741 #undef __ 3832 #undef __
3742 3833
3743 } // namespace dart 3834 } // namespace dart
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698