Index: runtime/vm/intermediate_language_dbc.cc
diff --git a/runtime/vm/intermediate_language_dbc.cc b/runtime/vm/intermediate_language_dbc.cc
index 5f986cb9104f38910a82ebce6ecc2ecd72a0d994..f3057af1452587f8fa4f44cff9f465276874401f 100644
--- a/runtime/vm/intermediate_language_dbc.cc
+++ b/runtime/vm/intermediate_language_dbc.cc
@@ -43,7 +43,7 @@ DECLARE_FLAG(int, optimization_counter_threshold);
M(BinaryUint32Op) \
M(ShiftUint32Op) \
M(UnaryUint32Op) \
- M(UnboxedIntConverter) \
+ M(UnboxedIntConverter)
// List of instructions that are not used by DBC.
// Things we aren't planning to implement for DBC:
@@ -92,7 +92,7 @@ DECLARE_FLAG(int, optimization_counter_threshold);
M(Float64x2ZeroArg) \
M(Float64x2OneArg) \
M(CheckedSmiOp) \
- M(CheckedSmiComparison) \
+ M(CheckedSmiComparison)
// Location summaries actually are not used by the unoptimizing DBC compiler
// because we don't allocate any registers.
@@ -102,11 +102,12 @@ static LocationSummary* CreateLocationSummary(
Location output = Location::NoLocation(),
LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall,
intptr_t num_temps = 0) {
- LocationSummary* locs = new(zone) LocationSummary(
- zone, num_inputs, num_temps, contains_call);
+ LocationSummary* locs =
+ new (zone) LocationSummary(zone, num_inputs, num_temps, contains_call);
for (intptr_t i = 0; i < num_inputs; i++) {
- locs->set_in(i, (contains_call == LocationSummary::kNoCall) ?
- Location::RequiresRegister() : Location::RegisterLocation(i));
+ locs->set_in(i, (contains_call == LocationSummary::kNoCall)
+ ? Location::RequiresRegister()
+ : Location::RegisterLocation(i));
}
for (intptr_t i = 0; i < num_temps; i++) {
locs->set_temp(i, Location::RequiresRegister());
@@ -123,25 +124,25 @@ static LocationSummary* CreateLocationSummary(
LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
const { \
return CreateLocationSummary(zone, __VA_ARGS__); \
- } \
+ }
#define EMIT_NATIVE_CODE(Name, ...) \
DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \
- void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) \
+ void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler)
#define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
const { \
if (!opt) UNIMPLEMENTED(); \
return NULL; \
- } \
+ }
#define DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \
LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
const { \
UNREACHABLE(); \
return NULL; \
- } \
+ }
#define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
@@ -165,7 +166,7 @@ static LocationSummary* CreateLocationSummary(
#define DEFINE_UNIMPLEMENTED(Name) \
DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
- DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
+ DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name)
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)
@@ -173,7 +174,7 @@ FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)
#define DEFINE_UNREACHABLE(Name) \
DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \
- DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name) \
+ DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name)
FOR_EACH_UNREACHABLE_INSTRUCTION(DEFINE_UNREACHABLE)
@@ -184,7 +185,9 @@ FOR_EACH_UNREACHABLE_INSTRUCTION(DEFINE_UNREACHABLE)
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(CheckedSmiComparison)
-EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(),
+EMIT_NATIVE_CODE(InstanceOf,
+ 2,
+ Location::SameAsFirstInput(),
LocationSummary::kCall) {
SubtypeTestCache& test_cache = SubtypeTestCache::Handle();
if (!type().IsVoidType() && type().IsInstantiated()) {
@@ -200,8 +203,7 @@ EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(),
__ PushConstant(test_cache);
__ InstanceOf(negate_result() ? 1 : 0);
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
if (compiler->is_optimizing()) {
@@ -210,21 +212,22 @@ EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(),
}
-DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2,
+DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable,
+ 2,
Location::SameAsFirstInput(),
LocationSummary::kCall);
EMIT_NATIVE_CODE(AssertBoolean,
- 1, Location::SameAsFirstInput(),
+ 1,
+ Location::SameAsFirstInput(),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
__ Push(locs()->in(0).reg());
}
__ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
if (compiler->is_optimizing()) {
__ Drop1();
@@ -233,13 +236,12 @@ EMIT_NATIVE_CODE(AssertBoolean,
EMIT_NATIVE_CODE(PolymorphicInstanceCall,
- 0, Location::RegisterLocation(0),
+ 0,
+ Location::RegisterLocation(0),
LocationSummary::kCall) {
ASSERT(ic_data().NumArgsTested() == 1);
- const Array& arguments_descriptor =
- Array::Handle(ArgumentsDescriptor::New(
- instance_call()->ArgumentCount(),
- instance_call()->argument_names()));
+ const Array& arguments_descriptor = Array::Handle(ArgumentsDescriptor::New(
+ instance_call()->ArgumentCount(), instance_call()->argument_names()));
const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
// Push the target onto the stack.
@@ -247,19 +249,18 @@ EMIT_NATIVE_CODE(PolymorphicInstanceCall,
const intptr_t may_be_smi =
(ic_data().GetReceiverClassIdAt(0) == kSmiCid) ? 1 : 0;
GrowableArray<CidTarget> sorted_ic_data;
- FlowGraphCompiler::SortICDataByCount(ic_data(),
- &sorted_ic_data,
+ FlowGraphCompiler::SortICDataByCount(ic_data(), &sorted_ic_data,
/* drop_smi = */ true);
const intptr_t sorted_length = sorted_ic_data.length();
if (!Utils::IsUint(8, sorted_length)) {
Unsupported(compiler);
UNREACHABLE();
}
- __ PushPolymorphicInstanceCall(
- instance_call()->ArgumentCount(), sorted_length + may_be_smi);
+ __ PushPolymorphicInstanceCall(instance_call()->ArgumentCount(),
+ sorted_length + may_be_smi);
if (may_be_smi == 1) {
- const Function& target = Function::ZoneHandle(
- compiler->zone(), ic_data().GetTargetAt(0));
+ const Function& target =
+ Function::ZoneHandle(compiler->zone(), ic_data().GetTargetAt(0));
__ Nop(compiler->ToEmbeddableCid(kSmiCid, this));
__ Nop(__ AddConstant(target));
}
@@ -268,8 +269,8 @@ EMIT_NATIVE_CODE(PolymorphicInstanceCall,
__ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this));
__ Nop(__ AddConstant(target));
}
- compiler->EmitDeopt(
- deopt_id(), ICData::kDeoptPolymorphicInstanceCallTestFail, 0);
+ compiler->EmitDeopt(deopt_id(),
+ ICData::kDeoptPolymorphicInstanceCallTestFail, 0);
} else {
ASSERT(ic_data().HasOneTarget());
const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0));
@@ -278,8 +279,8 @@ EMIT_NATIVE_CODE(PolymorphicInstanceCall,
// Call the function.
__ StaticCall(instance_call()->ArgumentCount(), argdesc_kidx);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(), instance_call()->token_pos());
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
+ instance_call()->token_pos());
compiler->RecordAfterCall(this);
__ PopLocal(locs()->out(0).reg());
}
@@ -291,11 +292,11 @@ EMIT_NATIVE_CODE(Stop, 0) {
EMIT_NATIVE_CODE(CheckStackOverflow,
- 0, Location::NoLocation(),
+ 0,
+ Location::NoLocation(),
LocationSummary::kCall) {
__ CheckStack();
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
compiler->RecordAfterCall(this);
}
@@ -319,11 +320,11 @@ EMIT_NATIVE_CODE(StoreLocal, 0) {
ASSERT(!compiler->is_optimizing());
ASSERT(local().index() != 0);
if (HasTemp()) {
- __ StoreLocal(
- (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
+ __ StoreLocal((local().index() > 0) ? (-local().index())
+ : (-local().index() - 1));
} else {
- __ PopLocal(
- (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
+ __ PopLocal((local().index() > 0) ? (-local().index())
+ : (-local().index() - 1));
}
}
@@ -376,12 +377,12 @@ EMIT_NATIVE_CODE(Return, 1) {
}
-LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(
- Zone* zone, bool opt) const {
+LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone,
+ bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 1;
- LocationSummary* locs = new(zone) LocationSummary(
- zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
+ LocationSummary* locs = new (zone)
+ LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
for (intptr_t i = 0; i < kNumInputs; i++) {
locs->set_in(i, Location::RequiresRegister());
}
@@ -408,8 +409,7 @@ void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) {
if (compiler->is_optimizing()) {
- __ LoadField(locs()->out(0).reg(),
- locs()->in(0).reg(),
+ __ LoadField(locs()->out(0).reg(), locs()->in(0).reg(),
Field::static_value_offset() / kWordSize);
} else {
const intptr_t kidx = __ AddConstant(StaticField());
@@ -437,9 +437,8 @@ EMIT_NATIVE_CODE(ClosureCall,
}
intptr_t argument_count = ArgumentCount();
- const Array& arguments_descriptor =
- Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
- argument_names()));
+ const Array& arguments_descriptor = Array::ZoneHandle(
+ ArgumentsDescriptor::New(argument_count, argument_names()));
const intptr_t argdesc_kidx =
compiler->assembler()->AddConstant(arguments_descriptor);
__ StaticCall(argument_count, argdesc_kidx);
@@ -481,8 +480,7 @@ static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
BranchLabels labels) {
- ASSERT((kind() == Token::kNE_STRICT) ||
- (kind() == Token::kEQ_STRICT));
+ ASSERT((kind() == Token::kNE_STRICT) || (kind() == Token::kEQ_STRICT));
Token::Kind comparison;
Condition condition;
@@ -492,32 +490,31 @@ Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
} else {
// Flip comparison to save a jump.
condition = NEXT_IS_FALSE;
- comparison = (kind() == Token::kEQ_STRICT) ? Token::kNE_STRICT
- : Token::kEQ_STRICT;
+ comparison =
+ (kind() == Token::kEQ_STRICT) ? Token::kNE_STRICT : Token::kEQ_STRICT;
}
if (!compiler->is_optimizing()) {
- const Bytecode::Opcode eq_op = needs_number_check() ?
- Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
- const Bytecode::Opcode ne_op = needs_number_check() ?
- Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
+ const Bytecode::Opcode eq_op = needs_number_check()
+ ? Bytecode::kIfEqStrictNumTOS
+ : Bytecode::kIfEqStrictTOS;
+ const Bytecode::Opcode ne_op = needs_number_check()
+ ? Bytecode::kIfNeStrictNumTOS
+ : Bytecode::kIfNeStrictTOS;
__ Emit(comparison == Token::kEQ_STRICT ? eq_op : ne_op);
} else {
- const Bytecode::Opcode eq_op = needs_number_check() ?
- Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
- const Bytecode::Opcode ne_op = needs_number_check() ?
- Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
- __ Emit(Bytecode::Encode(
- (comparison == Token::kEQ_STRICT) ? eq_op : ne_op,
- locs()->in(0).reg(),
- locs()->in(1).reg()));
+ const Bytecode::Opcode eq_op =
+ needs_number_check() ? Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
+ const Bytecode::Opcode ne_op =
+ needs_number_check() ? Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
+ __ Emit(Bytecode::Encode((comparison == Token::kEQ_STRICT) ? eq_op : ne_op,
+ locs()->in(0).reg(), locs()->in(1).reg()));
}
if (needs_number_check() && token_pos().IsReal()) {
compiler->RecordSafepoint(locs());
compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
- Thread::kNoDeoptId,
- token_pos());
+ Thread::kNoDeoptId, token_pos());
}
return condition;
@@ -526,8 +523,7 @@ Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
BranchInstr* branch) {
- ASSERT((kind() == Token::kEQ_STRICT) ||
- (kind() == Token::kNE_STRICT));
+ ASSERT((kind() == Token::kEQ_STRICT) || (kind() == Token::kNE_STRICT));
BranchLabels labels = compiler->CreateBranchLabels(branch);
Condition true_condition = EmitComparisonCode(compiler, labels);
@@ -540,11 +536,10 @@ EMIT_NATIVE_CODE(StrictCompare,
Location::RequiresRegister(),
needs_number_check() ? LocationSummary::kCall
: LocationSummary::kNoCall) {
- ASSERT((kind() == Token::kEQ_STRICT) ||
- (kind() == Token::kNE_STRICT));
+ ASSERT((kind() == Token::kEQ_STRICT) || (kind() == Token::kNE_STRICT));
Label is_true, is_false;
- BranchLabels labels = { &is_true, &is_false, &is_false };
+ BranchLabels labels = {&is_true, &is_false, &is_false};
Condition true_condition = EmitComparisonCode(compiler, labels);
EmitBranchOnCondition(compiler, true_condition, labels);
Label done;
@@ -567,8 +562,7 @@ EMIT_NATIVE_CODE(StrictCompare,
}
-LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone,
- bool opt) const {
+LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, bool opt) const {
comparison()->InitializeLocationSummary(zone, opt);
if (!comparison()->HasLocs()) {
return NULL;
@@ -588,8 +582,7 @@ EMIT_NATIVE_CODE(Goto, 0) {
if (!compiler->is_optimizing()) {
// Add a deoptimization descriptor for deoptimizing instructions that
// may be inserted before this instruction.
- compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
- GetDeoptId(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, GetDeoptId(),
TokenPosition::kNoSource);
}
if (HasParallelMove()) {
@@ -605,8 +598,7 @@ EMIT_NATIVE_CODE(Goto, 0) {
Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
BranchLabels labels) {
- ASSERT((kind() == Token::kEQ) ||
- (kind() == Token::kNE));
+ ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE));
Register left = locs()->in(0).reg();
Register right = locs()->in(1).reg();
__ TestSmi(left, right);
@@ -632,7 +624,7 @@ EMIT_NATIVE_CODE(TestSmi,
Condition TestCidsInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
- BranchLabels labels) {
+ BranchLabels labels) {
ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT));
const Register value = locs()->in(0).reg();
const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0;
@@ -651,8 +643,7 @@ Condition TestCidsInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
// No match found, deoptimize or false.
if (CanDeoptimize()) {
- compiler->EmitDeopt(deopt_id(),
- ICData::kDeoptTestCids,
+ compiler->EmitDeopt(deopt_id(), ICData::kDeoptTestCids,
licm_hoisted_ ? ICData::kHoisted : 0);
} else {
Label* target = result ? labels.false_label : labels.true_label;
@@ -671,11 +662,13 @@ void TestCidsInstr::EmitBranchCode(FlowGraphCompiler* compiler,
}
-EMIT_NATIVE_CODE(TestCids, 1, Location::RequiresRegister(),
+EMIT_NATIVE_CODE(TestCids,
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kNoCall) {
Register result_reg = locs()->out(0).reg();
Label is_true, is_false, done;
- BranchLabels labels = { &is_true, &is_false, &is_false };
+ BranchLabels labels = {&is_true, &is_false, &is_false};
EmitComparisonCode(compiler, labels);
__ Jump(&is_true);
__ Bind(&is_false);
@@ -688,7 +681,8 @@ EMIT_NATIVE_CODE(TestCids, 1, Location::RequiresRegister(),
EMIT_NATIVE_CODE(CreateArray,
- 2, Location::RequiresRegister(),
+ 2,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
const Register length = locs()->in(kLengthPos).reg();
@@ -707,8 +701,11 @@ EMIT_NATIVE_CODE(CreateArray,
}
-EMIT_NATIVE_CODE(StoreIndexed, 3, Location::NoLocation(),
- LocationSummary::kNoCall, 1) {
+EMIT_NATIVE_CODE(StoreIndexed,
+ 3,
+ Location::NoLocation(),
+ LocationSummary::kNoCall,
+ 1) {
if (!compiler->is_optimizing()) {
ASSERT(class_id() == kArrayCid);
__ StoreIndexedTOS();
@@ -787,8 +784,11 @@ EMIT_NATIVE_CODE(StoreIndexed, 3, Location::NoLocation(),
}
-EMIT_NATIVE_CODE(LoadIndexed, 2, Location::RequiresRegister(),
- LocationSummary::kNoCall, 1) {
+EMIT_NATIVE_CODE(LoadIndexed,
+ 2,
+ Location::RequiresRegister(),
+ LocationSummary::kNoCall,
+ 1) {
ASSERT(compiler->is_optimizing());
const Register array = locs()->in(0).reg();
const Register index = locs()->in(1).reg();
@@ -898,7 +898,8 @@ EMIT_NATIVE_CODE(LoadIndexed, 2, Location::RequiresRegister(),
EMIT_NATIVE_CODE(StringInterpolate,
- 1, Location::RegisterLocation(0),
+ 1,
+ Location::RegisterLocation(0),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
__ Push(locs()->in(0).reg());
@@ -917,7 +918,8 @@ EMIT_NATIVE_CODE(StringInterpolate,
EMIT_NATIVE_CODE(NativeCall,
- 0, Location::NoLocation(),
+ 0,
+ Location::NoLocation(),
LocationSummary::kCall) {
SetupNative();
@@ -937,14 +939,14 @@ EMIT_NATIVE_CODE(NativeCall,
__ NativeCall();
}
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
}
EMIT_NATIVE_CODE(OneByteStringFromCharCode,
- 1, Location::RequiresRegister(),
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kNoCall) {
ASSERT(compiler->is_optimizing());
const Register char_code = locs()->in(0).reg(); // Char code is a smi.
@@ -954,7 +956,8 @@ EMIT_NATIVE_CODE(OneByteStringFromCharCode,
EMIT_NATIVE_CODE(StringToCharCode,
- 1, Location::RequiresRegister(),
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kNoCall) {
ASSERT(cid_ == kOneByteStringCid);
const Register str = locs()->in(0).reg();
@@ -964,7 +967,8 @@ EMIT_NATIVE_CODE(StringToCharCode,
EMIT_NATIVE_CODE(AllocateObject,
- 0, Location::RequiresRegister(),
+ 0,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
if (ArgumentCount() == 1) {
// Allocate with type arguments.
@@ -979,8 +983,8 @@ EMIT_NATIVE_CODE(AllocateObject,
ASSERT(cls().id() != kIllegalCid);
tags = RawObject::ClassIdTag::update(cls().id(), tags);
if (Smi::IsValid(tags)) {
- const intptr_t tags_kidx = __ AddConstant(
- Smi::Handle(Smi::New(tags)));
+ const intptr_t tags_kidx =
+ __ AddConstant(Smi::Handle(Smi::New(tags)));
__ AllocateTOpt(locs()->out(0).reg(), tags_kidx);
__ Nop(cls().type_arguments_field_offset());
}
@@ -988,16 +992,14 @@ EMIT_NATIVE_CODE(AllocateObject,
__ PushConstant(cls());
__ AllocateT();
compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
- token_pos());
+ Thread::kNoDeoptId, token_pos());
compiler->RecordSafepoint(locs());
__ PopLocal(locs()->out(0).reg());
} else {
__ PushConstant(cls());
__ AllocateT();
compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
- token_pos());
+ Thread::kNoDeoptId, token_pos());
compiler->RecordSafepoint(locs());
}
} else if (compiler->is_optimizing()) {
@@ -1017,16 +1019,14 @@ EMIT_NATIVE_CODE(AllocateObject,
}
const intptr_t kidx = __ AddConstant(cls());
__ Allocate(kidx);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
compiler->RecordSafepoint(locs());
__ PopLocal(locs()->out(0).reg());
} else {
const intptr_t kidx = __ AddConstant(cls());
__ Allocate(kidx);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
compiler->RecordSafepoint(locs());
}
@@ -1090,42 +1090,42 @@ EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) {
EMIT_NATIVE_CODE(AllocateContext,
- 0, Location::RequiresRegister(),
+ 0,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
ASSERT(!compiler->is_optimizing());
__ AllocateContext(num_context_variables());
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
}
EMIT_NATIVE_CODE(AllocateUninitializedContext,
- 0, Location::RequiresRegister(),
+ 0,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
ASSERT(compiler->is_optimizing());
__ AllocateUninitializedContext(locs()->out(0).reg(),
num_context_variables());
__ AllocateContext(num_context_variables());
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
__ PopLocal(locs()->out(0).reg());
}
EMIT_NATIVE_CODE(CloneContext,
- 1, Location::RequiresRegister(),
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
__ Push(locs()->in(0).reg());
}
__ CloneContext();
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
if (compiler->is_optimizing()) {
__ PopLocal(locs()->out(0).reg());
@@ -1135,11 +1135,9 @@ EMIT_NATIVE_CODE(CloneContext,
EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
__ Bind(compiler->GetJumpLabel(this));
- compiler->AddExceptionHandler(catch_try_index(),
- try_index(),
+ compiler->AddExceptionHandler(catch_try_index(), try_index(),
compiler->assembler()->CodeSize(),
- catch_handler_types_,
- needs_stacktrace());
+ catch_handler_types_, needs_stacktrace());
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -1149,9 +1147,11 @@ EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
// of the allocatable register range.
const intptr_t num_non_copied_params =
compiler->flow_graph().num_non_copied_params();
- const intptr_t exception_reg = kNumberOfCpuRegisters -
+ const intptr_t exception_reg =
+ kNumberOfCpuRegisters -
(-exception_var().index() + num_non_copied_params);
- const intptr_t stacktrace_reg = kNumberOfCpuRegisters -
+ const intptr_t stacktrace_reg =
+ kNumberOfCpuRegisters -
(-stacktrace_var().index() + num_non_copied_params);
__ MoveSpecial(exception_reg, Simulator::kExceptionSpecialIndex);
__ MoveSpecial(stacktrace_reg, Simulator::kStacktraceSpecialIndex);
@@ -1167,8 +1167,7 @@ EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
__ Throw(0);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
compiler->RecordAfterCall(this);
__ Trap();
@@ -1178,23 +1177,22 @@ EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) {
compiler->SetNeedsStacktrace(catch_try_index());
__ Throw(1);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
compiler->RecordAfterCall(this);
__ Trap();
}
EMIT_NATIVE_CODE(InstantiateType,
- 1, Location::RequiresRegister(),
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
__ Push(locs()->in(0).reg());
}
__ InstantiateType(__ AddConstant(type()));
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
if (compiler->is_optimizing()) {
__ PopLocal(locs()->out(0).reg());
@@ -1202,7 +1200,8 @@ EMIT_NATIVE_CODE(InstantiateType,
}
EMIT_NATIVE_CODE(InstantiateTypeArguments,
- 1, Location::RequiresRegister(),
+ 1,
+ Location::RequiresRegister(),
LocationSummary::kCall) {
if (compiler->is_optimizing()) {
__ Push(locs()->in(0).reg());
@@ -1211,8 +1210,7 @@ EMIT_NATIVE_CODE(InstantiateTypeArguments,
type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
__ AddConstant(type_arguments()));
compiler->RecordSafepoint(locs());
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- deopt_id(),
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
token_pos());
if (compiler->is_optimizing()) {
__ PopLocal(locs()->out(0).reg());
@@ -1234,8 +1232,8 @@ void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
- LocationSummary* result = new(zone) LocationSummary(
- zone, 0, 0, LocationSummary::kCall);
+ LocationSummary* result =
+ new (zone) LocationSummary(zone, 0, 0, LocationSummary::kCall);
// TODO(vegorov) support allocating out registers for calls.
// Currently we require them to be fixed.
result->set_out(0, Location::RegisterLocation(0));
@@ -1262,8 +1260,8 @@ static const intptr_t kMintShiftCountLimit = 63;
bool ShiftMintOpInstr::has_shift_count_check() const {
- return !RangeUtils::IsWithin(
- right()->definition()->range(), 0, kMintShiftCountLimit);
+ return !RangeUtils::IsWithin(right()->definition()->range(), 0,
+ kMintShiftCountLimit);
}
@@ -1389,21 +1387,20 @@ Representation StoreIndexedInstr::RequiredInputRepresentation(
void Environment::DropArguments(intptr_t argc) {
#if defined(DEBUG)
- // Check that we are in the backend - register allocation has been run.
- ASSERT(locations_ != NULL);
+ // Check that we are in the backend - register allocation has been run.
+ ASSERT(locations_ != NULL);
- // Check that we are only dropping a valid number of instructions from the
- // environment.
- ASSERT(argc <= values_.length());
+ // Check that we are only dropping a valid number of instructions from the
+ // environment.
+ ASSERT(argc <= values_.length());
#endif
- values_.TruncateTo(values_.length() - argc);
+ values_.TruncateTo(values_.length() - argc);
}
EMIT_NATIVE_CODE(CheckSmi, 1) {
__ CheckSmi(locs()->in(0).reg());
- compiler->EmitDeopt(deopt_id(),
- ICData::kDeoptCheckSmi,
+ compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckSmi,
licm_hoisted_ ? ICData::kHoisted : 0);
}
@@ -1418,8 +1415,7 @@ EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) {
EMIT_NATIVE_CODE(CheckClassId, 1) {
- __ CheckClassId(locs()->in(0).reg(),
- compiler->ToEmbeddableCid(cid_, this));
+ __ CheckClassId(locs()->in(0).reg(), compiler->ToEmbeddableCid(cid_, this));
compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass);
}
@@ -1452,8 +1448,7 @@ EMIT_NATIVE_CODE(CheckClass, 1) {
__ Nop(__ AddConstant(Smi::Handle(Smi::New(cid_mask))));
} else {
GrowableArray<CidTarget> sorted_ic_data;
- FlowGraphCompiler::SortICDataByCount(unary_checks(),
- &sorted_ic_data,
+ FlowGraphCompiler::SortICDataByCount(unary_checks(), &sorted_ic_data,
/* drop_smi = */ true);
const intptr_t sorted_length = sorted_ic_data.length();
if (!Utils::IsUint(8, sorted_length)) {
@@ -1466,8 +1461,7 @@ EMIT_NATIVE_CODE(CheckClass, 1) {
}
}
}
- compiler->EmitDeopt(deopt_id(),
- ICData::kDeoptCheckClass,
+ compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass,
licm_hoisted_ ? ICData::kHoisted : 0);
}
@@ -1565,8 +1559,7 @@ EMIT_NATIVE_CODE(Box, 1, Location::RequiresRegister(), LocationSummary::kCall) {
}
const intptr_t kidx = __ AddConstant(compiler->double_class());
__ Allocate(kidx);
- compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
- Thread::kNoDeoptId,
+ compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
token_pos());
compiler->RecordSafepoint(locs());
__ PopLocal(out);
@@ -1585,7 +1578,7 @@ EMIT_NATIVE_CODE(Unbox, 1, Location::RequiresRegister()) {
} else if (CanConvertSmi() && (value_cid == kSmiCid)) {
__ SmiToDouble(result, box);
} else if ((value()->Type()->ToNullableCid() == box_cid) &&
- value()->Type()->is_nullable()) {
+ value()->Type()->is_nullable()) {
__ IfEqNull(box);
compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass);
__ UnboxDouble(result, box);
@@ -1651,11 +1644,20 @@ EMIT_NATIVE_CODE(BinaryDoubleOp, 2, Location::RequiresRegister()) {
const Register right = locs()->in(1).reg();
const Register result = locs()->out(0).reg();
switch (op_kind()) {
- case Token::kADD: __ DAdd(result, left, right); break;
- case Token::kSUB: __ DSub(result, left, right); break;
- case Token::kMUL: __ DMul(result, left, right); break;
- case Token::kDIV: __ DDiv(result, left, right); break;
- default: UNREACHABLE();
+ case Token::kADD:
+ __ DAdd(result, left, right);
+ break;
+ case Token::kSUB:
+ __ DSub(result, left, right);
+ break;
+ case Token::kMUL:
+ __ DMul(result, left, right);
+ break;
+ case Token::kDIV:
+ __ DDiv(result, left, right);
+ break;
+ default:
+ UNREACHABLE();
}
}
@@ -1683,8 +1685,8 @@ void DoubleTestOpInstr::EmitBranchCode(FlowGraphCompiler* compiler,
UNREACHABLE();
}
const bool is_negated = kind() != Token::kEQ;
- EmitBranchOnCondition(
- compiler, is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE, labels);
+ EmitBranchOnCondition(compiler, is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE,
+ labels);
}
@@ -1763,7 +1765,8 @@ EMIT_NATIVE_CODE(FloatToDouble, 1, Location::RequiresRegister()) {
EMIT_NATIVE_CODE(InvokeMathCFunction,
- InputCount(), Location::RequiresRegister()) {
+ InputCount(),
+ Location::RequiresRegister()) {
const Register left = locs()->in(0).reg();
const Register result = locs()->out(0).reg();
if (recognized_kind() == MethodRecognizer::kMathDoublePow) {
@@ -1808,12 +1811,18 @@ EMIT_NATIVE_CODE(MathMinMax, 2, Location::RequiresRegister()) {
static Token::Kind FlipCondition(Token::Kind kind) {
switch (kind) {
- case Token::kEQ: return Token::kNE;
- case Token::kNE: return Token::kEQ;
- case Token::kLT: return Token::kGTE;
- case Token::kGT: return Token::kLTE;
- case Token::kLTE: return Token::kGT;
- case Token::kGTE: return Token::kLT;
+ case Token::kEQ:
+ return Token::kNE;
+ case Token::kNE:
+ return Token::kEQ;
+ case Token::kLT:
+ return Token::kGTE;
+ case Token::kGT:
+ return Token::kLTE;
+ case Token::kLTE:
+ return Token::kGT;
+ case Token::kGTE:
+ return Token::kLT;
default:
UNREACHABLE();
return Token::kNE;
@@ -1823,12 +1832,18 @@ static Token::Kind FlipCondition(Token::Kind kind) {
static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) {
switch (kind) {
- case Token::kEQ: return Bytecode::kIfEqStrict;
- case Token::kNE: return Bytecode::kIfNeStrict;
- case Token::kLT: return Bytecode::kIfLt;
- case Token::kGT: return Bytecode::kIfGt;
- case Token::kLTE: return Bytecode::kIfLe;
- case Token::kGTE: return Bytecode::kIfGe;
+ case Token::kEQ:
+ return Bytecode::kIfEqStrict;
+ case Token::kNE:
+ return Bytecode::kIfNeStrict;
+ case Token::kLT:
+ return Bytecode::kIfLt;
+ case Token::kGT:
+ return Bytecode::kIfGt;
+ case Token::kLTE:
+ return Bytecode::kIfLe;
+ case Token::kGTE:
+ return Bytecode::kIfGe;
default:
UNREACHABLE();
return Bytecode::kTrap;
@@ -1838,12 +1853,18 @@ static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) {
static Bytecode::Opcode OpcodeForDoubleCondition(Token::Kind kind) {
switch (kind) {
- case Token::kEQ: return Bytecode::kIfDEq;
- case Token::kNE: return Bytecode::kIfDNe;
- case Token::kLT: return Bytecode::kIfDLt;
- case Token::kGT: return Bytecode::kIfDGt;
- case Token::kLTE: return Bytecode::kIfDLe;
- case Token::kGTE: return Bytecode::kIfDGe;
+ case Token::kEQ:
+ return Bytecode::kIfDEq;
+ case Token::kNE:
+ return Bytecode::kIfDNe;
+ case Token::kLT:
+ return Bytecode::kIfDLt;
+ case Token::kGT:
+ return Bytecode::kIfDGt;
+ case Token::kLTE:
+ return Bytecode::kIfDLe;
+ case Token::kGTE:
+ return Bytecode::kIfDGe;
default:
UNREACHABLE();
return Bytecode::kTrap;
@@ -1907,7 +1928,7 @@ EMIT_NATIVE_CODE(EqualityCompare, 2, Location::RequiresRegister()) {
// These labels are not used. They are arranged so that EmitComparisonCode
// emits a test that executes the following instruction when the test
// succeeds.
- BranchLabels labels = { &is_true, &is_false, &is_false };
+ BranchLabels labels = {&is_true, &is_false, &is_false};
const Register result = locs()->out(0).reg();
__ LoadConstant(result, Bool::False());
Condition true_condition = EmitComparisonCode(compiler, labels);
@@ -1939,7 +1960,7 @@ Condition RelationalOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
EMIT_NATIVE_CODE(RelationalOp, 2, Location::RequiresRegister()) {
ASSERT(compiler->is_optimizing());
Label is_true, is_false;
- BranchLabels labels = { &is_true, &is_false, &is_false };
+ BranchLabels labels = {&is_true, &is_false, &is_false};
const Register result = locs()->out(0).reg();
__ LoadConstant(result, Bool::False());
Condition true_condition = EmitComparisonCode(compiler, labels);
@@ -1962,16 +1983,14 @@ EMIT_NATIVE_CODE(CheckArrayBound, 2) {
const intptr_t index_cid = this->index()->Type()->ToCid();
if (index_cid != kSmiCid) {
__ CheckSmi(index);
- compiler->EmitDeopt(deopt_id(),
- ICData::kDeoptCheckArrayBound,
+ compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound,
(generalized_ ? ICData::kGeneralized : 0) |
- (licm_hoisted_ ? ICData::kHoisted : 0));
+ (licm_hoisted_ ? ICData::kHoisted : 0));
}
__ IfULe(length, index);
- compiler->EmitDeopt(deopt_id(),
- ICData::kDeoptCheckArrayBound,
+ compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound,
(generalized_ ? ICData::kGeneralized : 0) |
- (licm_hoisted_ ? ICData::kHoisted : 0));
+ (licm_hoisted_ ? ICData::kHoisted : 0));
}
} // namespace dart |