Index: runtime/vm/intermediate_language.cc
diff --git a/runtime/vm/intermediate_language.cc b/runtime/vm/intermediate_language.cc
index c68f277692311605272ee04f5a0cced09fcf3624..927b74758457f396e0bae3f3b74a12228d89aa0b 100644
--- a/runtime/vm/intermediate_language.cc
+++ b/runtime/vm/intermediate_language.cc
@@ -33,7 +33,7 @@ DEFINE_FLAG(bool, propagate_ic_data, true,
             "Propagate IC data from unoptimized to optimized IC calls.");
 DEFINE_FLAG(bool, two_args_smi_icd, true,
             "Generate special IC stubs for two args Smi operations");
-DEFINE_FLAG(bool, unbox_numeric_fields, true,
+DEFINE_FLAG(bool, unbox_numeric_fields, !USING_DBC,
             "Support unboxed double and float32x4 fields.");
 DECLARE_FLAG(bool, eliminate_type_checks);
 DECLARE_FLAG(bool, support_externalizable_strings);
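The default for unbox_numeric_fields now flips to false when the VM is built for the DBC target. DEFINE_FLAG takes a C++ constant expression rather than a preprocessor condition, so the change relies on a USING_DBC macro that expands to 0 or 1. A minimal sketch of how such a macro is typically derived from the target define (the exact spelling and location in the VM headers are an assumption, not part of this patch):

    // Sketch only: a 0/1 macro usable inside ordinary C++ expressions,
    // such as a flag's default value. The real definition presumably lives
    // in one of the VM's platform headers (e.g. vm/globals.h).
    #if defined(TARGET_ARCH_DBC)
    #define USING_DBC 1
    #else
    #define USING_DBC 0
    #endif

With a definition like that, !USING_DBC evaluates to true on every target except DBC, so numeric fields stay unboxed everywhere but the bytecode interpreter.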
@@ -2766,9 +2766,14 @@ LocationSummary* TargetEntryInstr::MakeLocationSummary(Zone* zone,
 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   __ Bind(compiler->GetJumpLabel(this));
   if (!compiler->is_optimizing()) {
+#if !defined(TARGET_ARCH_DBC)
+    // TODO(vegorov) re-enable edge counters on DBC if we consider them
+    // beneficial for the quality of the optimized bytecode.
     if (compiler->NeedsEdgeCounter(this)) {
       compiler->EmitEdgeCounter(preorder_number());
     }
+#endif
+
     // The deoptimization descriptor points after the edge counter code for
     // uniformity with ARM and MIPS, where we can reuse pattern matching
     // code that matches backwards from the end of the pattern.
@@ -2968,10 +2973,23 @@ LocationSummary* DropTempsInstr::MakeLocationSummary(Zone* zone,
 void DropTempsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+#if defined(TARGET_ARCH_DBC)
+  // On DBC the action of popping the TOS value and then pushing it back
+  // after all intermediates are popped is folded into a special
+  // bytecode (DropR). On other architectures this is handled by
+  // instruction prologues/epilogues.
+  ASSERT(!compiler->is_optimizing());
+  if ((InputCount() != 0) && HasTemp()) {
+    __ DropR(num_temps());
+  } else {
+    __ Drop(num_temps() + ((InputCount() != 0) ? 1 : 0));
+  }
+#else
   ASSERT(!compiler->is_optimizing());
   // Assert that register assignment is correct.
   ASSERT((InputCount() == 0) || (locs()->out(0).reg() == locs()->in(0).reg()));
   __ Drop(num_temps());
+#endif  // defined(TARGET_ARCH_DBC)
 }
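A rough sketch of the stack effect the comment above describes, written as an ordinary C++ helper purely for illustration (this is not the DBC simulator's actual code): DropR(n) pops the value on top of the expression stack, discards the n intermediates underneath it, and pushes the value back, while Drop(n) simply discards n entries.

    #include <cstdint>
    #include <vector>

    // Illustrative model only: DropR keeps the produced value on top of the
    // stack while removing the intermediates below it.
    static void SimulateDropR(std::vector<std::int64_t>* stack, int n) {
      const std::int64_t top = stack->back();  // value sitting above the temps
      stack->resize(stack->size() - (n + 1));  // drop it together with the n temps
      stack->push_back(top);                   // push the preserved value back
    }

That is why DropR is only needed when the instruction both consumes an input and has a value to keep on top (HasTemp()); in every other case a plain Drop of num_temps(), plus one extra slot for an unused input, is enough.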
@@ -2996,6 +3014,8 @@ LocationSummary* InstanceCallInstr::MakeLocationSummary(Zone* zone,
 }
+// DBC does not use specialized inline cache stubs for smi operations.
+#if !defined(TARGET_ARCH_DBC)
 static const StubEntry* TwoArgsSmiOpInlineCacheEntry(Token::Kind kind) {
   if (!FLAG_two_args_smi_icd) {
     return 0;
@@ -3007,6 +3027,7 @@ static const StubEntry* TwoArgsSmiOpInlineCacheEntry(Token::Kind kind) {
     default: return NULL;
   }
 }
+#endif
 void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
@@ -3023,6 +3044,8 @@ void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   } else {
     call_ic_data = &ICData::ZoneHandle(zone, ic_data()->raw());
   }
+
+#if !defined(TARGET_ARCH_DBC)
   if (compiler->is_optimizing() && HasICData()) {
     ASSERT(HasICData());
     if (ic_data()->NumberOfUsedChecks() > 0) {
@@ -3096,6 +3119,44 @@ void InstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                      *call_ic_data);
     }
   }
+#else
+  // Emit a smi fast-path instruction. If the fast path succeeds, it skips the
+  // next instruction; otherwise it falls through.
+  if (function_name().raw() == Symbols::Plus().raw()) {
+    __ AddTOS();
+  } else if (function_name().raw() == Symbols::EqualOperator().raw()) {
+    __ EqualTOS();
+  } else if (function_name().raw() == Symbols::LAngleBracket().raw()) {
+    __ LessThanTOS();
+  } else if (function_name().raw() == Symbols::RAngleBracket().raw()) {
+    __ GreaterThanTOS();
+  } else if (function_name().raw() == Symbols::BitAnd().raw()) {
+    __ BitAndTOS();
+  } else if (function_name().raw() == Symbols::BitOr().raw()) {
+    __ BitOrTOS();
+  } else if (function_name().raw() == Symbols::Star().raw()) {
+    __ MulTOS();
+  }
+
+  const intptr_t call_ic_data_kidx = __ AddConstant(*call_ic_data);
+  switch (call_ic_data->NumArgsTested()) {
+    case 1:
+      __ InstanceCall(ArgumentCount(), call_ic_data_kidx);
+      break;
+    case 2:
+      __ InstanceCall2(ArgumentCount(), call_ic_data_kidx);
+      break;
+    case 3:
+      __ InstanceCall3(ArgumentCount(), call_ic_data_kidx);
+      break;
+    default:
+      UNIMPLEMENTED();
+      break;
+  }
+  compiler->AddCurrentDescriptor(RawPcDescriptors::kIcCall,
+                                 deopt_id(),
+                                 token_pos());
+#endif  // !defined(TARGET_ARCH_DBC)
 }
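The fast-path bytecodes pair up with the IC call emitted right after them: each of the *TOS instructions attempts the Smi-only case and, as the comment above states, skips the following instruction when it succeeds. For an unoptimized binary "+" the emitted sequence therefore looks roughly like this (pseudo-assembly; the argument count and constant-pool index are illustrative):

    AddTOS                    ; Smi fast path; on success, skip the next instruction
    InstanceCall2 2, #kidx    ; generic IC call; the '+' ICData tests two arguments

Selectors with no fast-path bytecode simply fall through to the InstanceCall* emitted by the switch above.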
@@ -3118,6 +3179,10 @@ bool PolymorphicInstanceCallInstr::HasOnlyDispatcherTargets() const {
   return true;
 }
+
+// DBC does not support the optimizing compiler and thus doesn't emit
+// PolymorphicInstanceCallInstr.
+#if !defined(TARGET_ARCH_DBC)
 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(ic_data().NumArgsTested() == 1);
   if (!with_checks()) {
@@ -3141,6 +3206,7 @@ void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                         locs(),
                                         complete());
 }
+#endif
 LocationSummary* StaticCallInstr::MakeLocationSummary(Zone* zone,
@@ -3150,6 +3216,7 @@ LocationSummary* StaticCallInstr::MakeLocationSummary(Zone* zone,
 void StaticCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+#if !defined(TARGET_ARCH_DBC)
   const ICData* call_ic_data = NULL;
   if (!FLAG_propagate_ic_data || !compiler->is_optimizing() ||
       (ic_data() == NULL)) {
@@ -3182,6 +3249,20 @@ void StaticCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                argument_names(),
                                locs(),
                                *call_ic_data);
+#else
+  const Array& arguments_descriptor =
+      (ic_data() == NULL) ?
+      Array::Handle(ArgumentsDescriptor::New(ArgumentCount(),
+                                             argument_names())) :
+      Array::Handle(ic_data()->arguments_descriptor());
+  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
+
+  __ PushConstant(function());
+  __ StaticCall(ArgumentCount(), argdesc_kidx);
+  compiler->AddCurrentDescriptor(RawPcDescriptors::kUnoptStaticCall,
+                                 deopt_id(),
+                                 token_pos());
+#endif  // !defined(TARGET_ARCH_DBC)
 }
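On DBC the unoptimized static call boils down to two bytecodes: the callee is materialized from the constant pool and StaticCall consumes the arguments that earlier instructions have already pushed onto the expression stack. In pseudo-assembly (operand values illustrative):

    PushConstant #function     ; callee Function object from the constant pool
    StaticCall   n, #argdesc   ; n = ArgumentCount(), #argdesc = ArgumentsDescriptor

The ArgumentsDescriptor is taken from the attached ICData when there is one; otherwise a fresh descriptor is built from ArgumentCount() and argument_names(), exactly as the branch above does.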
@@ -3191,7 +3272,11 @@ void AssertAssignableInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                                      dst_type(),
                                      dst_name(),
                                      locs());
+
+  // DBC does not use LocationSummaries in the same way as other architectures.
+#if !defined(TARGET_ARCH_DBC)
   ASSERT(locs()->in(0).reg() == locs()->out(0).reg());
+#endif
 }
@@ -3302,11 +3387,6 @@ void Environment::DeepCopyToOuter(Zone* zone, Instruction* instr) const {
 }
-static bool BindsToSmiConstant(Value* value) {
-  return value->BindsToConstant() && value->BoundConstant().IsSmi();
-}
-
-
 ComparisonInstr* EqualityCompareInstr::CopyWithNewOperands(Value* new_left,
                                                            Value* new_right) {
   return new EqualityCompareInstr(token_pos(),
@@ -3374,9 +3454,17 @@ bool TestCidsInstr::AttributesEqual(Instruction* other) const {
 }
+#if !defined(TARGET_ARCH_DBC)
+static bool BindsToSmiConstant(Value* value) {
+  return value->BindsToConstant() && value->BoundConstant().IsSmi();
+}
+#endif
+
+
 bool IfThenElseInstr::Supports(ComparisonInstr* comparison,
                                Value* v1,
                                Value* v2) {
+#if !defined(TARGET_ARCH_DBC)
   bool is_smi_result = BindsToSmiConstant(v1) && BindsToSmiConstant(v2);
   if (comparison->IsStrictCompare()) {
     // Strict comparison with number checks calls a stub and is not supported
@@ -3389,6 +3477,9 @@ bool IfThenElseInstr::Supports(ComparisonInstr* comparison, |
return false; |
} |
return is_smi_result; |
+#else |
+ return false; |
+#endif // !defined(TARGET_ARCH_DBC) |
} |