OLD | NEW |
---|---|
1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC. |
6 #if defined(TARGET_ARCH_DBC) | 6 #if defined(TARGET_ARCH_DBC) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 83 matching lines...) | |
94 M(Float64x2OneArg) \ | 94 M(Float64x2OneArg) \ |
95 M(ExtractNthOutput) \ | 95 M(ExtractNthOutput) \ |
96 M(BinaryUint32Op) \ | 96 M(BinaryUint32Op) \ |
97 M(ShiftUint32Op) \ | 97 M(ShiftUint32Op) \ |
98 M(UnaryUint32Op) \ | 98 M(UnaryUint32Op) \ |
99 M(UnboxedIntConverter) \ | 99 M(UnboxedIntConverter) \ |
100 M(GrowRegExpStack) \ | 100 M(GrowRegExpStack) \ |
101 M(BoxInteger32) \ | 101 M(BoxInteger32) \ |
102 M(UnboxInteger32) \ | 102 M(UnboxInteger32) \ |
103 M(CheckedSmiOp) \ | 103 M(CheckedSmiOp) \ |
104 M(CheckArrayBound) \ | |
105 M(RelationalOp) \ | |
106 M(EqualityCompare) \ | |
107 M(LoadIndexed) | |
108 | 104 |
109 // Location summaries are not actually used by the unoptimizing DBC compiler | 105 // Location summaries are not actually used by the unoptimizing DBC compiler |
110 // because we don't allocate any registers. | 106 // because we don't allocate any registers. |
111 static LocationSummary* CreateLocationSummary( | 107 static LocationSummary* CreateLocationSummary( |
112 Zone* zone, | 108 Zone* zone, |
113 intptr_t num_inputs, | 109 intptr_t num_inputs, |
114 Location output = Location::NoLocation(), | 110 Location output = Location::NoLocation(), |
115 LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall) { | 111 LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall) { |
116 const intptr_t kNumTemps = 0; | 112 const intptr_t kNumTemps = 0; |
117 LocationSummary* locs = new(zone) LocationSummary( | 113 LocationSummary* locs = new(zone) LocationSummary( |
(...skipping 44 matching lines...) | |
162 | 158 |
163 #define DEFINE_UNIMPLEMENTED(Name) \ | 159 #define DEFINE_UNIMPLEMENTED(Name) \ |
164 DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \ | 160 DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \ |
165 DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \ | 161 DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \ |
166 | 162 |
167 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED) | 163 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED) |
168 | 164 |
169 #undef DEFINE_UNIMPLEMENTED | 165 #undef DEFINE_UNIMPLEMENTED |
170 | 166 |
171 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids) | 167 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids) |
172 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp) | |
173 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare) | |
174 | 168 |
175 | 169 |
176 EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(), | 170 EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(), |
177 LocationSummary::kCall) { | 171 LocationSummary::kCall) { |
178 SubtypeTestCache& test_cache = SubtypeTestCache::Handle(); | 172 SubtypeTestCache& test_cache = SubtypeTestCache::Handle(); |
179 if (!type().IsVoidType() && type().IsInstantiated()) { | 173 if (!type().IsVoidType() && type().IsInstantiated()) { |
180 test_cache = SubtypeTestCache::New(); | 174 test_cache = SubtypeTestCache::New(); |
181 } | 175 } |
182 | 176 |
183 if (compiler->is_optimizing()) { | 177 if (compiler->is_optimizing()) { |
(...skipping 391 matching lines...) | |
575 __ StoreIndexed(locs()->in(kArrayPos).reg(), | 569 __ StoreIndexed(locs()->in(kArrayPos).reg(), |
576 locs()->in(kIndexPos).reg(), | 570 locs()->in(kIndexPos).reg(), |
577 locs()->in(kValuePos).reg()); | 571 locs()->in(kValuePos).reg()); |
578 } else { | 572 } else { |
579 ASSERT(class_id() == kArrayCid); | 573 ASSERT(class_id() == kArrayCid); |
580 __ StoreIndexedTOS(); | 574 __ StoreIndexedTOS(); |
581 } | 575 } |
582 } | 576 } |
583 | 577 |
584 | 578 |
579 EMIT_NATIVE_CODE(LoadIndexed, 2, Location::RequiresRegister()) { | |
580 ASSERT(compiler->is_optimizing()); | |
581 if (class_id() != kArrayCid) { | |
582 #if defined(PRODUCT) | |
583 compiler->Bailout("LoadIndexed"); | |
584 #else // defined(PRODUCT) | |
585 compiler->Bailout(ToCString()); | |
586 #endif // defined(PRODUCT) | |
587 } | |
588 const Register array = locs()->in(0).reg(); | |
589 const Register index = locs()->in(1).reg(); | |
590 const Register result = locs()->out(0).reg(); | |
591 | |
592 __ LoadIndexed(result, array, index); | |
593 } | |
594 | |
595 | |
585 EMIT_NATIVE_CODE(StringInterpolate, | 596 EMIT_NATIVE_CODE(StringInterpolate, |
586 1, Location::RegisterLocation(0), | 597 1, Location::RegisterLocation(0), |
587 LocationSummary::kCall) { | 598 LocationSummary::kCall) { |
588 if (compiler->is_optimizing()) { | 599 if (compiler->is_optimizing()) { |
589 __ Push(locs()->in(0).reg()); | 600 __ Push(locs()->in(0).reg()); |
590 } | 601 } |
591 const intptr_t kArgumentCount = 1; | 602 const intptr_t kArgumentCount = 1; |
592 const Array& arguments_descriptor = Array::Handle( | 603 const Array& arguments_descriptor = Array::Handle( |
593 ArgumentsDescriptor::New(kArgumentCount, Object::null_array())); | 604 ArgumentsDescriptor::New(kArgumentCount, Object::null_array())); |
594 __ PushConstant(CallFunction()); | 605 __ PushConstant(CallFunction()); |
(...skipping 47 matching lines...) | |
642 EMIT_NATIVE_CODE(StringToCharCode, | 653 EMIT_NATIVE_CODE(StringToCharCode, |
643 1, Location::RequiresRegister(), | 654 1, Location::RequiresRegister(), |
644 LocationSummary::kNoCall) { | 655 LocationSummary::kNoCall) { |
645 ASSERT(cid_ == kOneByteStringCid); | 656 ASSERT(cid_ == kOneByteStringCid); |
646 const Register str = locs()->in(0).reg(); | 657 const Register str = locs()->in(0).reg(); |
647 const Register result = locs()->out(0).reg(); // Result char code is a smi. | 658 const Register result = locs()->out(0).reg(); // Result char code is a smi. |
648 __ StringToCharCode(result, str); | 659 __ StringToCharCode(result, str); |
649 } | 660 } |
650 | 661 |
651 | 662 |
652 | |
653 EMIT_NATIVE_CODE(AllocateObject, | 663 EMIT_NATIVE_CODE(AllocateObject, |
654 0, Location::RequiresRegister(), | 664 0, Location::RequiresRegister(), |
655 LocationSummary::kCall) { | 665 LocationSummary::kCall) { |
656 if (ArgumentCount() == 1) { | 666 if (ArgumentCount() == 1) { |
657 __ PushConstant(cls()); | 667 __ PushConstant(cls()); |
658 __ AllocateT(); | 668 __ AllocateT(); |
659 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, | 669 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, |
660 Thread::kNoDeoptId, | 670 Thread::kNoDeoptId, |
661 token_pos()); | 671 token_pos()); |
662 } else { | 672 } else { |
(...skipping 78 matching lines...) | |
741 __ MoveSpecial(-exception_var().index()-1, | 751 __ MoveSpecial(-exception_var().index()-1, |
742 Simulator::kExceptionSpecialIndex); | 752 Simulator::kExceptionSpecialIndex); |
743 __ MoveSpecial(-stacktrace_var().index()-1, | 753 __ MoveSpecial(-stacktrace_var().index()-1, |
744 Simulator::kStacktraceSpecialIndex); | 754 Simulator::kStacktraceSpecialIndex); |
745 __ SetFrame(compiler->StackSize()); | 755 __ SetFrame(compiler->StackSize()); |
746 } | 756 } |
747 | 757 |
748 | 758 |
749 EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) { | 759 EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) { |
750 __ Throw(0); | 760 __ Throw(0); |
751 compiler->RecordSafepoint(locs()); | |
752 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, | 761 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, |
753 deopt_id(), | 762 deopt_id(), |
754 token_pos()); | 763 token_pos()); |
764 compiler->RecordAfterCall(this); | |
755 __ Trap(); | 765 __ Trap(); |
756 } | 766 } |
757 | 767 |
758 | 768 |
759 EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) { | 769 EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) { |
760 compiler->SetNeedsStacktrace(catch_try_index()); | 770 compiler->SetNeedsStacktrace(catch_try_index()); |
761 __ Throw(1); | 771 __ Throw(1); |
762 compiler->RecordSafepoint(locs()); | |
763 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, | 772 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, |
764 deopt_id(), | 773 deopt_id(), |
765 token_pos()); | 774 token_pos()); |
775 compiler->RecordAfterCall(this); | |
766 __ Trap(); | 776 __ Trap(); |
767 } | 777 } |
768 | 778 |
769 EMIT_NATIVE_CODE(InstantiateType, | 779 EMIT_NATIVE_CODE(InstantiateType, |
770 1, Location::RequiresRegister(), | 780 1, Location::RequiresRegister(), |
771 LocationSummary::kCall) { | 781 LocationSummary::kCall) { |
772 if (compiler->is_optimizing()) { | 782 if (compiler->is_optimizing()) { |
773 __ Push(locs()->in(0).reg()); | 783 __ Push(locs()->in(0).reg()); |
774 } | 784 } |
775 __ InstantiateType(__ AddConstant(type())); | 785 __ InstantiateType(__ AddConstant(type())); |
(...skipping 380 matching lines...) | |
1156 break; | 1166 break; |
1157 } | 1167 } |
1158 case Token::kBIT_NOT: | 1168 case Token::kBIT_NOT: |
1159 __ BitNot(locs()->out(0).reg(), locs()->in(0).reg()); | 1169 __ BitNot(locs()->out(0).reg(), locs()->in(0).reg()); |
1160 break; | 1170 break; |
1161 default: | 1171 default: |
1162 UNREACHABLE(); | 1172 UNREACHABLE(); |
1163 } | 1173 } |
1164 } | 1174 } |
1165 | 1175 |
1176 | |
1177 static Token::Kind FlipCondition(Token::Kind kind) { | |
1178 switch (kind) { | |
1179 case Token::kEQ: return Token::kNE; | |
1180 case Token::kNE: return Token::kEQ; | |
1181 case Token::kLT: return Token::kGTE; | |
1182 case Token::kGT: return Token::kLTE; | |
1183 case Token::kLTE: return Token::kGT; | |
1184 case Token::kGTE: return Token::kLT; | |
1185 default: | |
1186 UNREACHABLE(); | |
1187 return Token::kNE; | |
1188 } | |
1189 } | |
1190 | |
1191 | |
1192 static Bytecode::Opcode OpcodeForCondition(Token::Kind kind) { | |
1193 switch (kind) { | |
1194 case Token::kEQ: return Bytecode::kIfEqStrict; | |
1195 case Token::kNE: return Bytecode::kIfNeStrict; | |
1196 case Token::kLT: return Bytecode::kIfLt; | |
1197 case Token::kGT: return Bytecode::kIfGt; | |
1198 case Token::kLTE: return Bytecode::kIfLe; | |
1199 case Token::kGTE: return Bytecode::kIfGe; | |
1200 default: | |
1201 UNREACHABLE(); | |
1202 return Bytecode::kTrap; | |
1203 } | |
1204 } | |
1205 | |
1206 | |
1207 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, | |
1208 LocationSummary* locs, | |
1209 Token::Kind kind, | |
1210 BranchLabels labels) { | |
1211 const Register left = locs->in(0).reg(); | |
1212 const Register right = locs->in(1).reg(); | |
1213 Token::Kind comparison = kind; | |
1214 Condition condition = NEXT_IS_TRUE; | |
1215 if (labels.fall_through != labels.false_label) { | |
Vyacheslav Egorov (Google)
2016/07/01 16:15:35
I don't get how this works. If label.fall_through
zra
2016/07/01 17:43:44
Added some comments here and to EmitBranchOnCondition
| |
1216 // Flip comparison to save a jump. | |
1217 condition = NEXT_IS_FALSE; | |
1218 comparison = FlipCondition(kind); | |
1219 } | |
1220 __ Emit(Bytecode::Encode(OpcodeForCondition(comparison), left, right)); | |
1221 return condition; | |
1222 } | |
1223 | |
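The review exchange above comes down to how the condition returned here is later consumed. Below is a minimal, self-contained sketch of that interplay, under one assumption: a DBC If* comparison bytecode skips the instruction right after it when its test fails, so the single Jump emitted after the comparison is taken only when the test holds. Every name in the sketch (Kind, Condition, BranchLabels, Choose, EmitBranch) is an invented stand-in for illustration, not the real VM API.

    #include <cstdio>

    // Invented stand-ins for Token::Kind, Condition and BranchLabels; the real
    // VM declarations are richer than this.
    enum Kind { kEQ, kNE, kLT, kGT, kLTE, kGTE };
    enum Condition { NEXT_IS_TRUE, NEXT_IS_FALSE };
    struct Label { const char* name; };
    struct BranchLabels { Label* true_label; Label* false_label; Label* fall_through; };

    static Kind Flip(Kind kind) {
      switch (kind) {
        case kEQ:  return kNE;
        case kNE:  return kEQ;
        case kLT:  return kGTE;
        case kGT:  return kLTE;
        case kLTE: return kGT;
        default:   return kLT;  // kGTE
      }
    }

    static const char* Name(Kind kind) {
      static const char* kNames[] = { "IfEq", "IfNe", "IfLt", "IfGt", "IfLe", "IfGe" };
      return kNames[kind];
    }

    // Mirrors the decision above: if the false block is not the fall-through,
    // test the negated condition so the single Jump goes to the false block and
    // the true block is reached by falling through.
    static Condition Choose(Kind kind, const BranchLabels& labels, Kind* emitted) {
      if (labels.fall_through != labels.false_label) {
        *emitted = Flip(kind);
        return NEXT_IS_FALSE;
      }
      *emitted = kind;
      return NEXT_IS_TRUE;
    }

    // Mirrors what a branch emitter would do with the returned condition:
    // one Jump to whichever label "execute the next instruction" stands for.
    static void EmitBranch(Condition condition, const BranchLabels& labels) {
      Label* target = (condition == NEXT_IS_TRUE) ? labels.true_label
                                                  : labels.false_label;
      std::printf("  Jump %s\n", target->name);
    }

    int main() {
      Label t = { "B_true" };
      Label f = { "B_false" };
      BranchLabels false_falls_through = { &t, &f, &f };
      BranchLabels true_falls_through = { &t, &f, &t };
      const BranchLabels* cases[] = { &false_falls_through, &true_falls_through };
      for (const BranchLabels* labels : cases) {
        Kind emitted;
        Condition condition = Choose(kLT, *labels, &emitted);
        std::printf("%s left, right   // skips the next instruction if the test fails\n",
                    Name(emitted));
        EmitBranch(condition, *labels);
        std::printf("  (fall through to %s)\n\n", labels->fall_through->name);
      }
      return 0;
    }

For the first configuration (false block falls through) this prints IfLt followed by Jump B_true; for the second it prints the flipped IfGe followed by Jump B_false. Either way only one jump is emitted, which is the saving the "save a jump" comment refers to.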
1224 | |
1225 Condition EqualityCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | |
1226 BranchLabels labels) { | |
1227 if (operation_cid() == kSmiCid) { | |
1228 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); | |
1229 } else { | |
1230 ASSERT(operation_cid() == kDoubleCid); | |
1231 #if defined(PRODUCT) | |
1232 compiler->Bailout("EqualityCompareInstr::EmitComparisonCode"); | |
1233 #else // defined(PRODUCT) | |
1234 compiler->Bailout(ToCString()); | |
1235 #endif // defined(PRODUCT) | |
1236 return NEXT_IS_FALSE; | |
1237 } | |
1238 } | |
1239 | |
1240 | |
1241 EMIT_NATIVE_CODE(EqualityCompare, 2, Location::RequiresRegister()) { | |
1242 ASSERT(compiler->is_optimizing()); | |
1243 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); | |
1244 Label is_true, is_false; | |
1245 BranchLabels labels = { &is_true, &is_false, &is_false }; | |
Vyacheslav Egorov (Google)
2016/07/01 16:15:35
This does not match the code below. This configura
zra
2016/07/01 17:43:45
The labels don't make sense here because we aren't
| |
1246 const Register result = locs()->out(0).reg(); | |
1247 __ LoadConstant(result, Bool::False()); | |
1248 Condition true_condition = EmitComparisonCode(compiler, labels); | |
1249 ASSERT(true_condition == NEXT_IS_TRUE); | |
1250 __ LoadConstant(result, Bool::True()); | |
1251 } | |
1252 | |
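As the reply above indicates, the is_true/is_false labels in this emitter (and in the RelationalOp emitter further down) are placeholders: choosing fall_through == false_label keeps EmitComparisonCode on the no-flip path, which is what the ASSERT on NEXT_IS_TRUE checks, and nothing in this diff ever binds or jumps to them. Assuming, as in the sketch above, that the comparison bytecode skips the following instruction when its test fails, the three emitted instructions behave like this toy model (names are illustrative only, not the VM API):

    #include <cstdio>

    // Toy model of: LoadConstant(result, False); IfEqStrict left, right;
    // LoadConstant(result, True). With skip-next-on-failure semantics the
    // True load runs only when the test holds, so no jump is needed at all.
    static bool MaterializeEquality(int left, int right) {
      bool result = false;   // LoadConstant(result, Bool::False())
      if (left == right) {   // IfEqStrict left, right (skip next if it fails)
        result = true;       // LoadConstant(result, Bool::True())
      }
      return result;
    }

    int main() {
      std::printf("1 == 1 -> %s\n", MaterializeEquality(1, 1) ? "true" : "false");
      std::printf("1 == 2 -> %s\n", MaterializeEquality(1, 2) ? "true" : "false");
      return 0;
    }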
1253 | |
1254 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | |
1255 BranchInstr* branch) { | |
1256 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); | |
1257 BranchLabels labels = compiler->CreateBranchLabels(branch); | |
1258 Condition true_condition = EmitComparisonCode(compiler, labels); | |
1259 EmitBranchOnCondition(compiler, true_condition, labels); | |
1260 } | |
1261 | |
1262 | |
1263 Condition RelationalOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | |
1264 BranchLabels labels) { | |
1265 if (operation_cid() == kSmiCid) { | |
1266 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); | |
1267 } else { | |
1268 ASSERT(operation_cid() == kDoubleCid); | |
1269 #if defined(PRODUCT) | |
1270 compiler->Bailout("RelationalOpInstr::EmitComparisonCode"); | |
1271 #else // defined(PRODUCT) | |
1272 compiler->Bailout(ToCString()); | |
1273 #endif // defined(PRODUCT) | |
1274 return NEXT_IS_FALSE; | |
1275 } | |
1276 } | |
1277 | |
1278 | |
1279 EMIT_NATIVE_CODE(RelationalOp, 2, Location::RequiresRegister()) { | |
1280 ASSERT(compiler->is_optimizing()); | |
1281 Label is_true, is_false; | |
1282 BranchLabels labels = { &is_true, &is_false, &is_false }; | |
Vyacheslav Egorov (Google)
2016/07/01 16:15:35
The same issue again. You are falling through to a
zra
2016/07/01 17:43:45
Acknowledged.
| |
1283 const Register result = locs()->out(0).reg(); | |
1284 __ LoadConstant(result, Bool::False()); | |
1285 Condition true_condition = EmitComparisonCode(compiler, labels); | |
1286 ASSERT(true_condition == NEXT_IS_TRUE); | |
1287 __ LoadConstant(result, Bool::True()); | |
1288 } | |
1289 | |
1290 | |
1291 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, | |
1292 BranchInstr* branch) { | |
1293 BranchLabels labels = compiler->CreateBranchLabels(branch); | |
1294 Condition true_condition = EmitComparisonCode(compiler, labels); | |
1295 EmitBranchOnCondition(compiler, true_condition, labels); | |
1296 } | |
1297 | |
1298 | |
1299 EMIT_NATIVE_CODE(CheckArrayBound, 2) { | |
1300 const Register length = locs()->in(kLengthPos).reg(); | |
1301 const Register index = locs()->in(kIndexPos).reg(); | |
1302 __ IfLe(length, index); | |
Vyacheslav Egorov (Google)
2016/07/01 16:15:35
This is not correct. This does not check for negat
zra
2016/07/01 17:43:45
Do you have a preference whether we should have un
zra
2016/07/01 20:55:39
Added unsigned comparison instructions. We can rev
| |
1303 compiler->EmitDeopt(deopt_id(), | |
1304 ICData::kDeoptCheckArrayBound, | |
1305 (generalized_ ? ICData::kGeneralized : 0) | | |
1306 (licm_hoisted_ ? ICData::kHoisted : 0)); | |
1307 } | |
1308 | |
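The bounds-check thread above is resolved by the usual trick: a single unsigned index < length comparison rejects both an index that is too large and a negative index, because a negative value reinterpreted as unsigned becomes enormous, whereas the signed IfLe(length, index) form originally emitted here only catches the first case. The sketch below shows the idea on plain integers; how the added unsigned comparison instructions operate on tagged Smis is not visible in this diff, so take it as the general principle rather than the exact encoding.

    #include <cstdint>
    #include <cstdio>

    // One unsigned comparison covers both failure modes of a bounds check:
    // index >= length, and index < 0 (which is huge when viewed as unsigned).
    static bool IndexInBounds(int64_t index, int64_t length) {
      return static_cast<uint64_t>(index) < static_cast<uint64_t>(length);
    }

    int main() {
      const int64_t length = 4;
      const int64_t probes[] = { -1, 0, 3, 4, 100 };
      for (int64_t index : probes) {
        std::printf("index %4lld, length %lld -> %s\n",
                    static_cast<long long>(index),
                    static_cast<long long>(length),
                    IndexInBounds(index, length) ? "in bounds" : "deopt");
      }
      return 0;
    }
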
1166 } // namespace dart | 1309 } // namespace dart |
1167 | 1310 |
1168 #endif // defined TARGET_ARCH_DBC | 1311 #endif // defined TARGET_ARCH_DBC |