Chromium Code Reviews

Diff: runtime/vm/intermediate_language_dbc.cc

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge, created 4 years, 1 month ago
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_DBC.
#if defined(TARGET_ARCH_DBC)

#include "vm/intermediate_language.h"

#include "vm/cpu.h"
(...skipping 25 matching lines...)
  M(DoubleToInteger) \
  M(BoxInt64) \
  M(MergedMath) \
  M(GuardFieldClass) \
  M(GuardFieldLength) \
  M(IfThenElse) \
  M(ExtractNthOutput) \
  M(BinaryUint32Op) \
  M(ShiftUint32Op) \
  M(UnaryUint32Op) \
  M(UnboxedIntConverter)

// List of instructions that are not used by DBC.
// Things we aren't planning to implement for DBC:
// - Unboxed SIMD,
// - Unboxed Mint,
// - Optimized RegExps,
// - Precompilation.
#define FOR_EACH_UNREACHABLE_INSTRUCTION(M) \
  M(CaseInsensitiveCompareUC16) \
  M(GenericCheckBound) \
(...skipping 28 matching lines...)
  M(BinaryFloat64x2Op) \
  M(Float64x2Zero) \
  M(Float64x2Constructor) \
  M(Float64x2Splat) \
  M(Float32x4ToFloat64x2) \
  M(Float64x2ToFloat32x4) \
  M(Simd64x2Shuffle) \
  M(Float64x2ZeroArg) \
  M(Float64x2OneArg) \
  M(CheckedSmiOp) \
  M(CheckedSmiComparison)

// Location summaries actually are not used by the unoptimizing DBC compiler
// because we don't allocate any registers.
static LocationSummary* CreateLocationSummary(
    Zone* zone,
    intptr_t num_inputs,
    Location output = Location::NoLocation(),
    LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall,
    intptr_t num_temps = 0) {
  LocationSummary* locs =
      new (zone) LocationSummary(zone, num_inputs, num_temps, contains_call);
  for (intptr_t i = 0; i < num_inputs; i++) {
    locs->set_in(i, (contains_call == LocationSummary::kNoCall)
                        ? Location::RequiresRegister()
                        : Location::RegisterLocation(i));
  }
  for (intptr_t i = 0; i < num_temps; i++) {
    locs->set_temp(i, Location::RequiresRegister());
  }
  if (!output.IsInvalid()) {
    // For instructions that call we default to returning result in R0.
    locs->set_out(0, output);
  }
  return locs;
}


#define DEFINE_MAKE_LOCATION_SUMMARY(Name, ...) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    return CreateLocationSummary(zone, __VA_ARGS__); \
  }

#define EMIT_NATIVE_CODE(Name, ...) \
  DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler)

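// Illustrative sketch (expansion spelled out by hand; not part of this CL):
// a definition such as
//
//   EMIT_NATIVE_CODE(Stop, 0) { __ Stop(message()); }
//
// should expand to roughly the following pair of definitions, with the
// user-written block becoming the body of EmitNativeCode:
//
//   LocationSummary* StopInstr::MakeLocationSummary(Zone* zone, bool opt)
//       const {
//     return CreateLocationSummary(zone, 0);
//   }
//   void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
//     __ Stop(message());
//   }
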
#define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    if (!opt) UNIMPLEMENTED(); \
    return NULL; \
  }

#define DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \
  LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
      const { \
    UNREACHABLE(); \
    return NULL; \
  }

#define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
    UNIMPLEMENTED(); \
  }

#define DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name) \
  void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
    UNREACHABLE(); \
  }

#define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \
  void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \
    UNIMPLEMENTED(); \
  } \
  Condition Name##Instr::EmitComparisonCode(FlowGraphCompiler*, \
                                            BranchLabels) { \
    UNIMPLEMENTED(); \
    return NEXT_IS_TRUE; \
  }

#define DEFINE_UNIMPLEMENTED(Name) \
  DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
  DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name)

FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)

#undef DEFINE_UNIMPLEMENTED

#define DEFINE_UNREACHABLE(Name) \
  DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \
  DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name)

FOR_EACH_UNREACHABLE_INSTRUCTION(DEFINE_UNREACHABLE)

#undef DEFINE_UNREACHABLE


// Only used in AOT compilation.
DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(CheckedSmiComparison)


EMIT_NATIVE_CODE(InstanceOf,
                 2,
                 Location::SameAsFirstInput(),
                 LocationSummary::kCall) {
  SubtypeTestCache& test_cache = SubtypeTestCache::Handle();
  if (!type().IsVoidType() && type().IsInstantiated()) {
    test_cache = SubtypeTestCache::New();
  }

  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());  // Value.
    __ Push(locs()->in(1).reg());  // Instantiator type arguments.
  }

  __ PushConstant(type());
  __ PushConstant(test_cache);
  __ InstanceOf(negate_result() ? 1 : 0);
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());

  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}


DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable,
                             2,
                             Location::SameAsFirstInput(),
                             LocationSummary::kCall);


EMIT_NATIVE_CODE(AssertBoolean,
                 1,
                 Location::SameAsFirstInput(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  if (compiler->is_optimizing()) {
    __ Drop1();
  }
}


EMIT_NATIVE_CODE(PolymorphicInstanceCall,
                 0,
                 Location::RegisterLocation(0),
                 LocationSummary::kCall) {
  ASSERT(ic_data().NumArgsTested() == 1);
  const Array& arguments_descriptor = Array::Handle(ArgumentsDescriptor::New(
      instance_call()->ArgumentCount(), instance_call()->argument_names()));
  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);

  // Push the target onto the stack.
  if (with_checks()) {
    const intptr_t may_be_smi =
        (ic_data().GetReceiverClassIdAt(0) == kSmiCid) ? 1 : 0;
    GrowableArray<CidTarget> sorted_ic_data;
    FlowGraphCompiler::SortICDataByCount(ic_data(), &sorted_ic_data,
                                         /* drop_smi = */ true);
    const intptr_t sorted_length = sorted_ic_data.length();
    if (!Utils::IsUint(8, sorted_length)) {
      Unsupported(compiler);
      UNREACHABLE();
    }
    __ PushPolymorphicInstanceCall(instance_call()->ArgumentCount(),
                                   sorted_length + may_be_smi);
    if (may_be_smi == 1) {
      const Function& target =
          Function::ZoneHandle(compiler->zone(), ic_data().GetTargetAt(0));
      __ Nop(compiler->ToEmbeddableCid(kSmiCid, this));
      __ Nop(__ AddConstant(target));
    }
    for (intptr_t i = 0; i < sorted_length; i++) {
      const Function& target = *sorted_ic_data[i].target;
      __ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this));
      __ Nop(__ AddConstant(target));
    }
    compiler->EmitDeopt(deopt_id(),
                        ICData::kDeoptPolymorphicInstanceCallTestFail, 0);
  } else {
    ASSERT(ic_data().HasOneTarget());
    const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0));
    __ PushConstant(target);
  }

  // Call the function.
  __ StaticCall(instance_call()->ArgumentCount(), argdesc_kidx);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 instance_call()->token_pos());
  compiler->RecordAfterCall(this);
  __ PopLocal(locs()->out(0).reg());
}

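// Illustrative note (assumes how the interpreter decodes this sequence; not
// part of this CL): the __ Nop pairs emitted above form an inline
// cid -> target dispatch table placed right after the
// PushPolymorphicInstanceCall bytecode, one (cid, constant-pool index of
// target) pair per checked class, e.g. for two cases:
//
//   PushPolymorphicInstanceCall argc, 2
//   Nop cid_0
//   Nop kidx(target_0)
//   Nop cid_1
//   Nop kidx(target_1)
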
EMIT_NATIVE_CODE(Stop, 0) {
  __ Stop(message());
}


EMIT_NATIVE_CODE(CheckStackOverflow,
                 0,
                 Location::NoLocation(),
                 LocationSummary::kCall) {
  __ CheckStack();
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCall(this);
}


EMIT_NATIVE_CODE(PushArgument, 1) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
}


EMIT_NATIVE_CODE(LoadLocal, 0) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
  __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1));
}

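// Worked example (arithmetic of the mapping above; the frame layout itself
// is assumed): a positive index n is pushed as -n and a negative index -m
// as m - 1, so index 2 -> -2, index -1 -> 0, index -2 -> 1. Index 0 is
// ruled out by the ASSERT.
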
EMIT_NATIVE_CODE(StoreLocal, 0) {
  ASSERT(!compiler->is_optimizing());
  ASSERT(local().index() != 0);
  if (HasTemp()) {
    __ StoreLocal((local().index() > 0) ? (-local().index())
                                        : (-local().index() - 1));
  } else {
    __ PopLocal((local().index() > 0) ? (-local().index())
                                      : (-local().index() - 1));
  }
}


EMIT_NATIVE_CODE(LoadClassId, 1, Location::RequiresRegister()) {
  if (compiler->is_optimizing()) {
    __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg());
  } else {
    __ LoadClassIdTOS();
  }
(...skipping 32 matching lines...)

EMIT_NATIVE_CODE(Return, 1) {
  if (compiler->is_optimizing()) {
    __ Return(locs()->in(0).reg());
  } else {
    __ ReturnTOS();
  }
}


LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 1;
  LocationSummary* locs = new (zone)
      LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
  for (intptr_t i = 0; i < kNumInputs; i++) {
    locs->set_in(i, Location::RequiresRegister());
  }
  for (intptr_t i = 0; i < kNumTemps; i++) {
    locs->set_temp(i, Location::RequiresRegister());
  }
  return locs;
}


void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (compiler->is_optimizing()) {
    __ LoadConstant(locs()->temp(0).reg(),
                    Field::ZoneHandle(field().Original()));
    __ StoreField(locs()->temp(0).reg(),
                  Field::static_value_offset() / kWordSize,
                  locs()->in(0).reg());
  } else {
    const intptr_t kidx = __ AddConstant(field());
    __ StoreStaticTOS(kidx);
  }
}


EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) {
  if (compiler->is_optimizing()) {
    __ LoadField(locs()->out(0).reg(), locs()->in(0).reg(),
                 Field::static_value_offset() / kWordSize);
  } else {
    const intptr_t kidx = __ AddConstant(StaticField());
    __ PushStatic(kidx);
  }
}


EMIT_NATIVE_CODE(InitStaticField, 1) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
    __ InitStaticTOS();
  } else {
    __ InitStaticTOS();
  }
}


EMIT_NATIVE_CODE(ClosureCall,
                 1,
                 Location::RegisterLocation(0),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }

  intptr_t argument_count = ArgumentCount();
  const Array& arguments_descriptor = Array::ZoneHandle(
      ArgumentsDescriptor::New(argument_count, argument_names()));
  const intptr_t argdesc_kidx =
      compiler->assembler()->AddConstant(arguments_descriptor);
  __ StaticCall(argument_count, argdesc_kidx);
  compiler->RecordAfterCall(this);
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}


(...skipping 21 matching lines...)
    // If we aren't falling through to the true case, then we have to do
    // a Jump to it here.
    __ Jump(labels.true_label);
  }
}


Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                                 BranchLabels labels) {
  ASSERT((kind() == Token::kNE_STRICT) || (kind() == Token::kEQ_STRICT));

  Token::Kind comparison;
  Condition condition;
  if (labels.fall_through == labels.false_label) {
    condition = NEXT_IS_TRUE;
    comparison = kind();
  } else {
    // Flip comparison to save a jump.
    condition = NEXT_IS_FALSE;
    comparison =
        (kind() == Token::kEQ_STRICT) ? Token::kNE_STRICT : Token::kEQ_STRICT;
  }

  if (!compiler->is_optimizing()) {
    const Bytecode::Opcode eq_op = needs_number_check()
                                       ? Bytecode::kIfEqStrictNumTOS
                                       : Bytecode::kIfEqStrictTOS;
    const Bytecode::Opcode ne_op = needs_number_check()
                                       ? Bytecode::kIfNeStrictNumTOS
                                       : Bytecode::kIfNeStrictTOS;
    __ Emit(comparison == Token::kEQ_STRICT ? eq_op : ne_op);
  } else {
    const Bytecode::Opcode eq_op =
        needs_number_check() ? Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
    const Bytecode::Opcode ne_op =
        needs_number_check() ? Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
    __ Emit(Bytecode::Encode((comparison == Token::kEQ_STRICT) ? eq_op : ne_op,
                             locs()->in(0).reg(), locs()->in(1).reg()));
  }

  if (needs_number_check() && token_pos().IsReal()) {
    compiler->RecordSafepoint(locs());
    compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                   Thread::kNoDeoptId, token_pos());
  }

  return condition;
}

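// Illustrative note (assumes the DBC If* semantics: the instruction that
// follows a comparison bytecode executes when the comparison holds and is
// skipped otherwise; not part of this CL). The flip above lets the code emit
// exactly one jump, targeting whichever label is not the fall-through.
// E.g., when the true block is the fall-through for `a === b`:
//
//   IfNeStrict a, b     // flipped kEQ_STRICT -> kNE_STRICT, NEXT_IS_FALSE
//   Jump ->false_label  // executed only when a === b fails
//   ...                 // true block falls through
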
void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                        BranchInstr* branch) {
  ASSERT((kind() == Token::kEQ_STRICT) || (kind() == Token::kNE_STRICT));

  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


EMIT_NATIVE_CODE(StrictCompare,
                 2,
                 Location::RequiresRegister(),
                 needs_number_check() ? LocationSummary::kCall
                                      : LocationSummary::kNoCall) {
  ASSERT((kind() == Token::kEQ_STRICT) || (kind() == Token::kNE_STRICT));

  Label is_true, is_false;
  BranchLabels labels = {&is_true, &is_false, &is_false};
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
  Label done;
  if (compiler->is_optimizing()) {
    const Register result = locs()->out(0).reg();
    __ Bind(&is_false);
    __ LoadConstant(result, Bool::False());
    __ Jump(&done);
    __ Bind(&is_true);
    __ LoadConstant(result, Bool::True());
    __ Bind(&done);
  } else {
    __ Bind(&is_false);
    __ PushConstant(Bool::False());
    __ Jump(&done);
    __ Bind(&is_true);
    __ PushConstant(Bool::True());
    __ Bind(&done);
  }
}


LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, bool opt) const {
  comparison()->InitializeLocationSummary(zone, opt);
  if (!comparison()->HasLocs()) {
    return NULL;
  }
  // Branches don't produce a result.
  comparison()->locs()->set_out(0, Location::NoLocation());
  return comparison()->locs();
}


void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  comparison()->EmitBranchCode(compiler, this);
}


EMIT_NATIVE_CODE(Goto, 0) {
  if (!compiler->is_optimizing()) {
    // Add a deoptimization descriptor for deoptimizing instructions that
    // may be inserted before this instruction.
    compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, GetDeoptId(),
                                   TokenPosition::kNoSource);
  }
  if (HasParallelMove()) {
    compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
  }
  // We can fall through if the successor is the next block in the list.
  // Otherwise, we need a jump.
  if (!compiler->CanFallThroughTo(successor())) {
    __ Jump(compiler->GetJumpLabel(successor()));
  }
}


Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                           BranchLabels labels) {
  ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE));
  Register left = locs()->in(0).reg();
  Register right = locs()->in(1).reg();
  __ TestSmi(left, right);
  return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE;
}


void TestSmiInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                  BranchInstr* branch) {
  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


EMIT_NATIVE_CODE(TestSmi,
                 2,
                 Location::RequiresRegister(),
                 LocationSummary::kNoCall) {
  // Never emitted outside of the BranchInstr.
  UNREACHABLE();
}


Condition TestCidsInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                            BranchLabels labels) {
  ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT));
  const Register value = locs()->in(0).reg();
  const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0;

  const ZoneGrowableArray<intptr_t>& data = cid_results();
  const intptr_t num_cases = data.length() / 2;
  ASSERT(num_cases <= 255);
  __ TestCids(value, num_cases);

  bool result = false;
  for (intptr_t i = 0; i < data.length(); i += 2) {
    const intptr_t test_cid = data[i];
    result = data[i + 1] == true_result;
    __ Nop(result ? 1 : 0, compiler->ToEmbeddableCid(test_cid, this));
  }

  // No match found, deoptimize or false.
  if (CanDeoptimize()) {
    compiler->EmitDeopt(deopt_id(), ICData::kDeoptTestCids,
                        licm_hoisted_ ? ICData::kHoisted : 0);
  } else {
    Label* target = result ? labels.false_label : labels.true_label;
    __ Jump(target);
  }

  return NEXT_IS_TRUE;
}

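// Worked example (hypothetical cids, layout as consumed by the loop above):
// cid_results() is a flat array of (cid, result) pairs, so a test like
// `x is String` checked against both string representations could store
//
//   { kOneByteStringCid, 1, kTwoByteStringCid, 1 }
//
// giving num_cases == 2, with one __ Nop(result, cid) entry emitted per pair
// for the TestCids bytecode to match against.
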
void TestCidsInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                   BranchInstr* branch) {
  BranchLabels labels = compiler->CreateBranchLabels(branch);
  Condition true_condition = EmitComparisonCode(compiler, labels);
  EmitBranchOnCondition(compiler, true_condition, labels);
}


EMIT_NATIVE_CODE(TestCids,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kNoCall) {
  Register result_reg = locs()->out(0).reg();
  Label is_true, is_false, done;
  BranchLabels labels = {&is_true, &is_false, &is_false};
  EmitComparisonCode(compiler, labels);
  __ Jump(&is_true);
  __ Bind(&is_false);
  __ LoadConstant(result_reg, Bool::False());
  __ Jump(&done);
  __ Bind(&is_true);
  __ LoadConstant(result_reg, Bool::True());
  __ Bind(&done);
}


EMIT_NATIVE_CODE(CreateArray,
                 2,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    const Register length = locs()->in(kLengthPos).reg();
    const Register type_arguments = locs()->in(kElementTypePos).reg();
    const Register out = locs()->out(0).reg();
    __ CreateArrayOpt(out, length, type_arguments);
    __ Push(type_arguments);
    __ Push(length);
    __ CreateArrayTOS();
    compiler->RecordSafepoint(locs());
    __ PopLocal(out);
  } else {
    __ CreateArrayTOS();
    compiler->RecordSafepoint(locs());
  }
}


EMIT_NATIVE_CODE(StoreIndexed,
                 3,
                 Location::NoLocation(),
                 LocationSummary::kNoCall,
                 1) {
  if (!compiler->is_optimizing()) {
    ASSERT(class_id() == kArrayCid);
    __ StoreIndexedTOS();
    return;
  }
  const Register array = locs()->in(kArrayPos).reg();
  const Register index = locs()->in(kIndexPos).reg();
  const Register value = locs()->in(kValuePos).reg();
  const Register temp = locs()->temp(0).reg();
  switch (class_id()) {
(...skipping 58 matching lines...)
      }
      break;
    default:
      Unsupported(compiler);
      UNREACHABLE();
      break;
  }
}


EMIT_NATIVE_CODE(LoadIndexed,
                 2,
                 Location::RequiresRegister(),
                 LocationSummary::kNoCall,
                 1) {
  ASSERT(compiler->is_optimizing());
  const Register array = locs()->in(0).reg();
  const Register index = locs()->in(1).reg();
  const Register temp = locs()->temp(0).reg();
  const Register result = locs()->out(0).reg();
  switch (class_id()) {
    case kArrayCid:
    case kImmutableArrayCid:
      __ LoadIndexed(result, array, index);
      break;
(...skipping 89 matching lines...)
      break;
    default:
      Unsupported(compiler);
      UNREACHABLE();
      break;
  }
}


EMIT_NATIVE_CODE(StringInterpolate,
                 1,
                 Location::RegisterLocation(0),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  const intptr_t kArgumentCount = 1;
  const Array& arguments_descriptor = Array::Handle(
      ArgumentsDescriptor::New(kArgumentCount, Object::null_array()));
  __ PushConstant(CallFunction());
  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
  __ StaticCall(kArgumentCount, argdesc_kidx);
  compiler->RecordAfterCall(this);
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}


EMIT_NATIVE_CODE(NativeCall,
                 0,
                 Location::NoLocation(),
                 LocationSummary::kCall) {
  SetupNative();

  const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());

  ASSERT(!link_lazily());
  const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
  const intptr_t target_kidx =
      __ object_pool_wrapper().FindNativeEntry(&label, kNotPatchable);
  const intptr_t argc_tag_kidx =
      __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
  __ PushConstant(target_kidx);
  __ PushConstant(argc_tag_kidx);
  if (is_bootstrap_native()) {
    __ NativeBootstrapCall();
  } else {
    __ NativeCall();
  }
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(OneByteStringFromCharCode,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kNoCall) {
  ASSERT(compiler->is_optimizing());
  const Register char_code = locs()->in(0).reg();  // Char code is a smi.
  const Register result = locs()->out(0).reg();
  __ OneByteStringFromCharCode(result, char_code);
}


EMIT_NATIVE_CODE(StringToCharCode,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kNoCall) {
  ASSERT(cid_ == kOneByteStringCid);
  const Register str = locs()->in(0).reg();
  const Register result = locs()->out(0).reg();  // Result char code is a smi.
  __ StringToCharCode(result, str);
}


EMIT_NATIVE_CODE(AllocateObject,
                 0,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (ArgumentCount() == 1) {
    // Allocate with type arguments.
    if (compiler->is_optimizing()) {
      // If we're optimizing, try a streamlined fastpath.
      const intptr_t instance_size = cls().instance_size();
      Isolate* isolate = Isolate::Current();
      if (Heap::IsAllocatableInNewSpace(instance_size) &&
          !cls().TraceAllocation(isolate)) {
        uword tags = 0;
        tags = RawObject::SizeTag::update(instance_size, tags);
        ASSERT(cls().id() != kIllegalCid);
        tags = RawObject::ClassIdTag::update(cls().id(), tags);
        if (Smi::IsValid(tags)) {
          const intptr_t tags_kidx =
              __ AddConstant(Smi::Handle(Smi::New(tags)));
          __ AllocateTOpt(locs()->out(0).reg(), tags_kidx);
          __ Nop(cls().type_arguments_field_offset());
        }
      }
      __ PushConstant(cls());
      __ AllocateT();
      compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                     Thread::kNoDeoptId, token_pos());
      compiler->RecordSafepoint(locs());
      __ PopLocal(locs()->out(0).reg());
    } else {
      __ PushConstant(cls());
      __ AllocateT();
      compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                     Thread::kNoDeoptId, token_pos());
      compiler->RecordSafepoint(locs());
    }
  } else if (compiler->is_optimizing()) {
    // If we're optimizing, try a streamlined fastpath.
    const intptr_t instance_size = cls().instance_size();
    Isolate* isolate = Isolate::Current();
    if (Heap::IsAllocatableInNewSpace(instance_size) &&
        !cls().TraceAllocation(isolate)) {
      uword tags = 0;
      tags = RawObject::SizeTag::update(instance_size, tags);
      ASSERT(cls().id() != kIllegalCid);
      tags = RawObject::ClassIdTag::update(cls().id(), tags);
      if (Smi::IsValid(tags)) {
        const intptr_t tags_kidx = __ AddConstant(Smi::Handle(Smi::New(tags)));
        __ AllocateOpt(locs()->out(0).reg(), tags_kidx);
      }
    }
    const intptr_t kidx = __ AddConstant(cls());
    __ Allocate(kidx);
    compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                   token_pos());
    compiler->RecordSafepoint(locs());
    __ PopLocal(locs()->out(0).reg());
  } else {
    const intptr_t kidx = __ AddConstant(cls());
    __ Allocate(kidx);
    compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                   token_pos());
    compiler->RecordSafepoint(locs());
  }
}

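// Illustrative note (assumes the RawObject header layout; not part of this
// CL): the fast path above precomputes the object's header word at compile
// time by packing the instance size and class id bitfields, conceptually
//
//   uword tags = SizeTag(instance_size) | ClassIdTag(cls().id());
//
// Smi::IsValid(tags) then checks that the packed header still fits in a Smi
// so it can live in the constant pool; when it doesn't, only the generic
// Allocate/AllocateT path is emitted.
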
EMIT_NATIVE_CODE(StoreInstanceField, 2) {
  ASSERT(!HasTemp());
  ASSERT(offset_in_bytes() % kWordSize == 0);
  if (compiler->is_optimizing()) {
(...skipping 43 matching lines...)

EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) {
  if (compiler->is_optimizing()) {
    __ BooleanNegate(locs()->out(0).reg(), locs()->in(0).reg());
  } else {
    __ BooleanNegateTOS();
  }
}


EMIT_NATIVE_CODE(AllocateContext,
                 0,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  ASSERT(!compiler->is_optimizing());
  __ AllocateContext(num_context_variables());
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
}


EMIT_NATIVE_CODE(AllocateUninitializedContext,
                 0,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  ASSERT(compiler->is_optimizing());
  __ AllocateUninitializedContext(locs()->out(0).reg(),
                                  num_context_variables());
  __ AllocateContext(num_context_variables());
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
  __ PopLocal(locs()->out(0).reg());
}


EMIT_NATIVE_CODE(CloneContext,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  __ CloneContext();
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}


EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
  __ Bind(compiler->GetJumpLabel(this));
  compiler->AddExceptionHandler(catch_try_index(), try_index(),
                                compiler->assembler()->CodeSize(),
                                catch_handler_types_, needs_stacktrace());

  if (HasParallelMove()) {
    compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
  }
  if (compiler->is_optimizing()) {
    // In optimized code, variables at the catch block entry reside at the top
    // of the allocatable register range.
    const intptr_t num_non_copied_params =
        compiler->flow_graph().num_non_copied_params();
    const intptr_t exception_reg =
        kNumberOfCpuRegisters -
        (-exception_var().index() + num_non_copied_params);
    const intptr_t stacktrace_reg =
        kNumberOfCpuRegisters -
        (-stacktrace_var().index() + num_non_copied_params);
    __ MoveSpecial(exception_reg, Simulator::kExceptionSpecialIndex);
    __ MoveSpecial(stacktrace_reg, Simulator::kStacktraceSpecialIndex);
  } else {
    __ MoveSpecial(LocalVarIndex(0, exception_var().index()),
                   Simulator::kExceptionSpecialIndex);
    __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()),
                   Simulator::kStacktraceSpecialIndex);
  }
  __ SetFrame(compiler->StackSize());
}

EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
  __ Throw(0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCall(this);
  __ Trap();
}


EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) {
  compiler->SetNeedsStacktrace(catch_try_index());
  __ Throw(1);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCall(this);
  __ Trap();
}

EMIT_NATIVE_CODE(InstantiateType,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  __ InstantiateType(__ AddConstant(type()));
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}

EMIT_NATIVE_CODE(InstantiateTypeArguments,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  __ InstantiateTypeArgumentsTOS(
      type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
      __ AddConstant(type_arguments()));
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}


void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ DebugStep();
  compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos());
}


void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (!compiler->CanFallThroughTo(normal_entry())) {
    __ Jump(compiler->GetJumpLabel(normal_entry()));
  }
}


LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
  LocationSummary* result =
      new (zone) LocationSummary(zone, 0, 0, LocationSummary::kCall);
  // TODO(vegorov) support allocating out registers for calls.
  // Currently we require them to be fixed.
  result->set_out(0, Location::RegisterLocation(0));
  return result;
}
1244 1242
1245 1243
1246 CompileType BinaryUint32OpInstr::ComputeType() const { 1244 CompileType BinaryUint32OpInstr::ComputeType() const {
1247 return CompileType::Int(); 1245 return CompileType::Int();
1248 } 1246 }
1249 1247
1250 1248
1251 CompileType ShiftUint32OpInstr::ComputeType() const { 1249 CompileType ShiftUint32OpInstr::ComputeType() const {
1252 return CompileType::Int(); 1250 return CompileType::Int();
1253 } 1251 }
1254 1252
1255 1253
1256 CompileType UnaryUint32OpInstr::ComputeType() const { 1254 CompileType UnaryUint32OpInstr::ComputeType() const {
1257 return CompileType::Int(); 1255 return CompileType::Int();
1258 } 1256 }
1259 1257
1260 1258
1261 static const intptr_t kMintShiftCountLimit = 63; 1259 static const intptr_t kMintShiftCountLimit = 63;
1262 1260
1263 1261
1264 bool ShiftMintOpInstr::has_shift_count_check() const { 1262 bool ShiftMintOpInstr::has_shift_count_check() const {
1265 return !RangeUtils::IsWithin( 1263 return !RangeUtils::IsWithin(right()->definition()->range(), 0,
1266 right()->definition()->range(), 0, kMintShiftCountLimit); 1264 kMintShiftCountLimit);
1267 } 1265 }
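has_shift_count_check() above elides the runtime guard whenever range analysis has already bounded the shift count to [0, 63]. A simplified model of that test; the Range representation here is assumed, and the VM's RangeUtils is richer:

#include <cstdint>
#include <cstdio>

struct Range {
  int64_t min, max;
};

static bool IsWithin(const Range* range, int64_t lo, int64_t hi) {
  return range != nullptr && range->min >= lo && range->max <= hi;
}

int main() {
  const int64_t kMintShiftCountLimit = 63;
  Range masked = {0, 31};       // e.g. a count produced by `x & 31`
  Range unbounded = {-1, 100};  // analysis could not bound the count
  printf("masked needs check: %d\n",
         !IsWithin(&masked, 0, kMintShiftCountLimit));     // 0
  printf("unbounded needs check: %d\n",
         !IsWithin(&unbounded, 0, kMintShiftCountLimit));  // 1
  return 0;
}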
1268 1266
1269 1267
1270 CompileType LoadIndexedInstr::ComputeType() const { 1268 CompileType LoadIndexedInstr::ComputeType() const {
1271 switch (class_id_) { 1269 switch (class_id_) {
1272 case kArrayCid: 1270 case kArrayCid:
1273 case kImmutableArrayCid: 1271 case kImmutableArrayCid:
1274 return CompileType::Dynamic(); 1272 return CompileType::Dynamic();
1275 1273
1276 case kTypedDataFloat32ArrayCid: 1274 case kTypedDataFloat32ArrayCid:
(...skipping 105 matching lines...)
1382 return kUnboxedFloat64x2; 1380 return kUnboxedFloat64x2;
1383 default: 1381 default:
1384 UNREACHABLE(); 1382 UNREACHABLE();
1385 return kTagged; 1383 return kTagged;
1386 } 1384 }
1387 } 1385 }
1388 1386
1389 1387
1390 void Environment::DropArguments(intptr_t argc) { 1388 void Environment::DropArguments(intptr_t argc) {
1391 #if defined(DEBUG) 1389 #if defined(DEBUG)
1392 // Check that we are in the backend - register allocation has been run. 1390 // Check that we are in the backend - register allocation has been run.
1393 ASSERT(locations_ != NULL); 1391 ASSERT(locations_ != NULL);
1394 1392
1395 // Check that we are only dropping a valid number of instructions from the 1393 // Check that we are only dropping a valid number of instructions from the
1396 // environment. 1394 // environment.
1397 ASSERT(argc <= values_.length()); 1395 ASSERT(argc <= values_.length());
1398 #endif 1396 #endif
1399 values_.TruncateTo(values_.length() - argc); 1397 values_.TruncateTo(values_.length() - argc);
1400 } 1398 }
1401 1399
1402 1400
1403 EMIT_NATIVE_CODE(CheckSmi, 1) { 1401 EMIT_NATIVE_CODE(CheckSmi, 1) {
1404 __ CheckSmi(locs()->in(0).reg()); 1402 __ CheckSmi(locs()->in(0).reg());
1405 compiler->EmitDeopt(deopt_id(), 1403 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckSmi,
1406 ICData::kDeoptCheckSmi,
1407 licm_hoisted_ ? ICData::kHoisted : 0); 1404 licm_hoisted_ ? ICData::kHoisted : 0);
1408 } 1405 }
1409 1406
1410 1407
1411 EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) { 1408 EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) {
1412 const Register left = locs()->in(0).reg(); 1409 const Register left = locs()->in(0).reg();
1413 const Register right = locs()->in(1).reg(); 1410 const Register right = locs()->in(1).reg();
1414 __ CheckEitherNonSmi(left, right); 1411 __ CheckEitherNonSmi(left, right);
1415 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp, 1412 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp,
1416 licm_hoisted_ ? ICData::kHoisted : 0); 1413 licm_hoisted_ ? ICData::kHoisted : 0);
1417 } 1414 }
1418 1415
1419 1416
1420 EMIT_NATIVE_CODE(CheckClassId, 1) { 1417 EMIT_NATIVE_CODE(CheckClassId, 1) {
1421 __ CheckClassId(locs()->in(0).reg(), 1418 __ CheckClassId(locs()->in(0).reg(), compiler->ToEmbeddableCid(cid_, this));
1422 compiler->ToEmbeddableCid(cid_, this));
1423 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass); 1419 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass);
1424 } 1420 }
1425 1421
1426 1422
1427 EMIT_NATIVE_CODE(CheckClass, 1) { 1423 EMIT_NATIVE_CODE(CheckClass, 1) {
1428 const Register value = locs()->in(0).reg(); 1424 const Register value = locs()->in(0).reg();
1429 if (IsNullCheck()) { 1425 if (IsNullCheck()) {
1430 ASSERT(DeoptIfNull() || DeoptIfNotNull()); 1426 ASSERT(DeoptIfNull() || DeoptIfNotNull());
1431 if (DeoptIfNull()) { 1427 if (DeoptIfNull()) {
1432 __ IfEqNull(value); 1428 __ IfEqNull(value);
(...skipping 12 matching lines...)
1445 cid_mask = ComputeCidMask(); 1441 cid_mask = ComputeCidMask();
1446 is_dense_switch = Smi::IsValid(cid_mask); 1442 is_dense_switch = Smi::IsValid(cid_mask);
1447 } 1443 }
1448 if (is_dense_switch) { 1444 if (is_dense_switch) {
1449 const intptr_t low_cid = cids_[0]; 1445 const intptr_t low_cid = cids_[0];
1450 __ CheckDenseSwitch(value, may_be_smi); 1446 __ CheckDenseSwitch(value, may_be_smi);
1451 __ Nop(compiler->ToEmbeddableCid(low_cid, this)); 1447 __ Nop(compiler->ToEmbeddableCid(low_cid, this));
1452 __ Nop(__ AddConstant(Smi::Handle(Smi::New(cid_mask)))); 1448 __ Nop(__ AddConstant(Smi::Handle(Smi::New(cid_mask))));
1453 } else { 1449 } else {
1454 GrowableArray<CidTarget> sorted_ic_data; 1450 GrowableArray<CidTarget> sorted_ic_data;
1455 FlowGraphCompiler::SortICDataByCount(unary_checks(), 1451 FlowGraphCompiler::SortICDataByCount(unary_checks(), &sorted_ic_data,
1456 &sorted_ic_data,
1457 /* drop_smi = */ true); 1452 /* drop_smi = */ true);
1458 const intptr_t sorted_length = sorted_ic_data.length(); 1453 const intptr_t sorted_length = sorted_ic_data.length();
1459 if (!Utils::IsUint(8, sorted_length)) { 1454 if (!Utils::IsUint(8, sorted_length)) {
1460 Unsupported(compiler); 1455 Unsupported(compiler);
1461 UNREACHABLE(); 1456 UNREACHABLE();
1462 } 1457 }
1463 __ CheckCids(value, may_be_smi, sorted_length); 1458 __ CheckCids(value, may_be_smi, sorted_length);
1464 for (intptr_t i = 0; i < sorted_length; i++) { 1459 for (intptr_t i = 0; i < sorted_length; i++) {
1465 __ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this)); 1460 __ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this));
1466 } 1461 }
1467 } 1462 }
1468 } 1463 }
1469 compiler->EmitDeopt(deopt_id(), 1464 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass,
1470 ICData::kDeoptCheckClass,
1471 licm_hoisted_ ? ICData::kHoisted : 0); 1465 licm_hoisted_ ? ICData::kHoisted : 0);
1472 } 1466 }
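The CheckDenseSwitch path above relies on the clustered-cid bitmask idea: when the receiver class ids sit near each other, membership reduces to a subtract, a shift, and a mask against a Smi-encodable constant. A sketch of how such a mask can be built and queried (encoding details assumed; the VM also handles the Smi case separately via may_be_smi):

#include <cstdint>
#include <cstdio>
#include <initializer_list>

int main() {
  const int cids[] = {10, 11, 13};  // hypothetical receiver class ids
  const int low_cid = cids[0];
  uint64_t cid_mask = 0;
  for (int cid : cids) cid_mask |= uint64_t{1} << (cid - low_cid);
  // cid_mask == 0b1011: one bit per allowed cid, relative to low_cid.
  for (int probe : {10, 12, 13, 20}) {
    const int bit = probe - low_cid;
    const bool ok = bit >= 0 && bit < 64 && ((cid_mask >> bit) & 1) != 0;
    printf("cid %d -> %s\n", probe, ok ? "pass" : "deopt");
  }
  return 0;
}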
1473 1467
1474 1468
1475 EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) { 1469 EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) {
1476 const Register left = locs()->in(0).reg(); 1470 const Register left = locs()->in(0).reg();
1477 const Register right = locs()->in(1).reg(); 1471 const Register right = locs()->in(1).reg();
1478 const Register out = locs()->out(0).reg(); 1472 const Register out = locs()->out(0).reg();
1479 const bool can_deopt = CanDeoptimize(); 1473 const bool can_deopt = CanDeoptimize();
1480 bool needs_nop = false; 1474 bool needs_nop = false;
(...skipping 77 matching lines...)
1558 uword tags = 0; 1552 uword tags = 0;
1559 tags = RawObject::SizeTag::update(instance_size, tags); 1553 tags = RawObject::SizeTag::update(instance_size, tags);
1560 tags = RawObject::ClassIdTag::update(compiler->double_class().id(), tags); 1554 tags = RawObject::ClassIdTag::update(compiler->double_class().id(), tags);
1561 if (Smi::IsValid(tags)) { 1555 if (Smi::IsValid(tags)) {
1562 const intptr_t tags_kidx = __ AddConstant(Smi::Handle(Smi::New(tags))); 1556 const intptr_t tags_kidx = __ AddConstant(Smi::Handle(Smi::New(tags)));
1563 __ AllocateOpt(out, tags_kidx); 1557 __ AllocateOpt(out, tags_kidx);
1564 } 1558 }
1565 } 1559 }
1566 const intptr_t kidx = __ AddConstant(compiler->double_class()); 1560 const intptr_t kidx = __ AddConstant(compiler->double_class());
1567 __ Allocate(kidx); 1561 __ Allocate(kidx);
1568 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, 1562 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
1569 Thread::kNoDeoptId,
1570 token_pos()); 1563 token_pos());
1571 compiler->RecordSafepoint(locs()); 1564 compiler->RecordSafepoint(locs());
1572 __ PopLocal(out); 1565 __ PopLocal(out);
1573 __ WriteIntoDouble(out, value); 1566 __ WriteIntoDouble(out, value);
1574 } 1567 }
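The Box fast path above precomputes the object's header word (size tag plus class-id tag) at compile time and, if that word is Smi-representable, embeds it for AllocateOpt; otherwise only the slow Allocate path runs. A rough model of the idea, with every field width, shift, and value assumed purely for illustration:

#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t kSizeShift = 8;      // assumed layout
  const uint64_t kClassIdShift = 16;  // assumed layout
  const uint64_t instance_size = 16;  // bytes for a boxed double (assumed)
  const uint64_t double_cid = 62;     // hypothetical class id

  uint64_t tags = 0;
  tags |= (instance_size / 8) << kSizeShift;  // size stored scaled (assumed)
  tags |= double_cid << kClassIdShift;

  // Only a Smi-representable word can be embedded as a bytecode constant.
  const bool fits_in_smi = tags <= (uint64_t{1} << 62) - 1;  // assumed range
  printf("tags = 0x%llx, path: %s\n", (unsigned long long)tags,
         fits_in_smi ? "AllocateOpt" : "Allocate only");
  return 0;
}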
1575 1568
1576 1569
1577 EMIT_NATIVE_CODE(Unbox, 1, Location::RequiresRegister()) { 1570 EMIT_NATIVE_CODE(Unbox, 1, Location::RequiresRegister()) {
1578 ASSERT(representation() == kUnboxedDouble); 1571 ASSERT(representation() == kUnboxedDouble);
1579 const intptr_t value_cid = value()->Type()->ToCid(); 1572 const intptr_t value_cid = value()->Type()->ToCid();
1580 const intptr_t box_cid = BoxCid(); 1573 const intptr_t box_cid = BoxCid();
1581 const Register box = locs()->in(0).reg(); 1574 const Register box = locs()->in(0).reg();
1582 const Register result = locs()->out(0).reg(); 1575 const Register result = locs()->out(0).reg();
1583 if (value_cid == box_cid) { 1576 if (value_cid == box_cid) {
1584 __ UnboxDouble(result, box); 1577 __ UnboxDouble(result, box);
1585 } else if (CanConvertSmi() && (value_cid == kSmiCid)) { 1578 } else if (CanConvertSmi() && (value_cid == kSmiCid)) {
1586 __ SmiToDouble(result, box); 1579 __ SmiToDouble(result, box);
1587 } else if ((value()->Type()->ToNullableCid() == box_cid) && 1580 } else if ((value()->Type()->ToNullableCid() == box_cid) &&
1588 value()->Type()->is_nullable()) { 1581 value()->Type()->is_nullable()) {
1589 __ IfEqNull(box); 1582 __ IfEqNull(box);
1590 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); 1583 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass);
1591 __ UnboxDouble(result, box); 1584 __ UnboxDouble(result, box);
1592 } else { 1585 } else {
1593 __ CheckedUnboxDouble(result, box); 1586 __ CheckedUnboxDouble(result, box);
1594 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); 1587 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass);
1595 } 1588 }
1596 } 1589 }
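Unbox above picks the cheapest safe strategy from what type analysis already proved: direct unbox, Smi conversion, a null check plus unbox, or the fully checked path. The same decision order restated as a standalone sketch (cids and names are illustrative, not the VM's API):

#include <cstdio>

enum UnboxPath { kUnboxDirect, kSmiToDouble, kNullCheckThenUnbox, kCheckedUnbox };

static UnboxPath Choose(int value_cid, int box_cid, int nullable_cid,
                        bool is_nullable, bool can_convert_smi, int smi_cid) {
  if (value_cid == box_cid) return kUnboxDirect;         // proven a double
  if (can_convert_smi && value_cid == smi_cid) return kSmiToDouble;
  if (is_nullable && nullable_cid == box_cid) return kNullCheckThenUnbox;
  return kCheckedUnbox;                                  // full runtime check
}

int main() {
  const int kSmiCid = 1, kDoubleCid = 2;  // hypothetical ids
  printf("%d\n", Choose(kDoubleCid, kDoubleCid, 0, false, true, kSmiCid));  // 0
  printf("%d\n", Choose(kSmiCid, kDoubleCid, 0, false, true, kSmiCid));     // 1
  printf("%d\n", Choose(0, kDoubleCid, kDoubleCid, true, true, kSmiCid));   // 2
  printf("%d\n", Choose(0, kDoubleCid, 0, false, true, kSmiCid));           // 3
  return 0;
}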
1597 1590
1598 1591
(...skipping 45 matching lines...)
1644 const Register result = locs()->out(0).reg(); 1637 const Register result = locs()->out(0).reg();
1645 __ SmiToDouble(result, value); 1638 __ SmiToDouble(result, value);
1646 } 1639 }
1647 1640
1648 1641
1649 EMIT_NATIVE_CODE(BinaryDoubleOp, 2, Location::RequiresRegister()) { 1642 EMIT_NATIVE_CODE(BinaryDoubleOp, 2, Location::RequiresRegister()) {
1650 const Register left = locs()->in(0).reg(); 1643 const Register left = locs()->in(0).reg();
1651 const Register right = locs()->in(1).reg(); 1644 const Register right = locs()->in(1).reg();
1652 const Register result = locs()->out(0).reg(); 1645 const Register result = locs()->out(0).reg();
1653 switch (op_kind()) { 1646 switch (op_kind()) {
1654 case Token::kADD: __ DAdd(result, left, right); break; 1647 case Token::kADD:
1655 case Token::kSUB: __ DSub(result, left, right); break; 1648 __ DAdd(result, left, right);
1656 case Token::kMUL: __ DMul(result, left, right); break; 1649 break;
1657 case Token::kDIV: __ DDiv(result, left, right); break; 1650 case Token::kSUB:
1658 default: UNREACHABLE(); 1651 __ DSub(result, left, right);
1652 break;
1653 case Token::kMUL:
1654 __ DMul(result, left, right);
1655 break;
1656 case Token::kDIV:
1657 __ DDiv(result, left, right);
1658 break;
1659 default:
1660 UNREACHABLE();
1659 } 1661 }
1660 } 1662 }
1661 1663
1662 1664
1663 Condition DoubleTestOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, 1665 Condition DoubleTestOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
1664 BranchLabels labels) { 1666 BranchLabels labels) {
1665 UNREACHABLE(); 1667 UNREACHABLE();
1666 return Condition(); 1668 return Condition();
1667 } 1669 }
1668 1670
1669 1671
1670 void DoubleTestOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, 1672 void DoubleTestOpInstr::EmitBranchCode(FlowGraphCompiler* compiler,
1671 BranchInstr* branch) { 1673 BranchInstr* branch) {
1672 ASSERT(compiler->is_optimizing()); 1674 ASSERT(compiler->is_optimizing());
1673 BranchLabels labels = compiler->CreateBranchLabels(branch); 1675 BranchLabels labels = compiler->CreateBranchLabels(branch);
1674 const Register value = locs()->in(0).reg(); 1676 const Register value = locs()->in(0).reg();
1675 switch (op_kind()) { 1677 switch (op_kind()) {
1676 case MethodRecognizer::kDouble_getIsNaN: 1678 case MethodRecognizer::kDouble_getIsNaN:
1677 __ DoubleIsNaN(value); 1679 __ DoubleIsNaN(value);
1678 break; 1680 break;
1679 case MethodRecognizer::kDouble_getIsInfinite: 1681 case MethodRecognizer::kDouble_getIsInfinite:
1680 __ DoubleIsInfinite(value); 1682 __ DoubleIsInfinite(value);
1681 break; 1683 break;
1682 default: 1684 default:
1683 UNREACHABLE(); 1685 UNREACHABLE();
1684 } 1686 }
1685 const bool is_negated = kind() != Token::kEQ; 1687 const bool is_negated = kind() != Token::kEQ;
1686 EmitBranchOnCondition( 1688 EmitBranchOnCondition(compiler, is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE,
1687 compiler, is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE, labels); 1689 labels);
1688 } 1690 }
1689 1691
1690 1692
1691 EMIT_NATIVE_CODE(DoubleTestOp, 1, Location::RequiresRegister()) { 1693 EMIT_NATIVE_CODE(DoubleTestOp, 1, Location::RequiresRegister()) {
1692 ASSERT(compiler->is_optimizing()); 1694 ASSERT(compiler->is_optimizing());
1693 const Register value = locs()->in(0).reg(); 1695 const Register value = locs()->in(0).reg();
1694 const Register result = locs()->out(0).reg(); 1696 const Register result = locs()->out(0).reg();
1695 const bool is_negated = kind() != Token::kEQ; 1697 const bool is_negated = kind() != Token::kEQ;
1696 __ LoadConstant(result, is_negated ? Bool::True() : Bool::False()); 1698 __ LoadConstant(result, is_negated ? Bool::True() : Bool::False());
1697 switch (op_kind()) { 1699 switch (op_kind()) {
(...skipping 58 matching lines...)
1756 1758
1757 1759
1758 EMIT_NATIVE_CODE(FloatToDouble, 1, Location::RequiresRegister()) { 1760 EMIT_NATIVE_CODE(FloatToDouble, 1, Location::RequiresRegister()) {
1759 const Register in = locs()->in(0).reg(); 1761 const Register in = locs()->in(0).reg();
1760 const Register result = locs()->out(0).reg(); 1762 const Register result = locs()->out(0).reg();
1761 __ FloatToDouble(result, in); 1763 __ FloatToDouble(result, in);
1762 } 1764 }
1763 1765
1764 1766
1765 EMIT_NATIVE_CODE(InvokeMathCFunction, 1767 EMIT_NATIVE_CODE(InvokeMathCFunction,
1766 InputCount(), Location::RequiresRegister()) { 1768 InputCount(),
1769 Location::RequiresRegister()) {
1767 const Register left = locs()->in(0).reg(); 1770 const Register left = locs()->in(0).reg();
1768 const Register result = locs()->out(0).reg(); 1771 const Register result = locs()->out(0).reg();
1769 if (recognized_kind() == MethodRecognizer::kMathDoublePow) { 1772 if (recognized_kind() == MethodRecognizer::kMathDoublePow) {
1770 const Register right = locs()->in(1).reg(); 1773 const Register right = locs()->in(1).reg();
1771 __ DPow(result, left, right); 1774 __ DPow(result, left, right);
1772 } else if (recognized_kind() == MethodRecognizer::kDoubleMod) { 1775 } else if (recognized_kind() == MethodRecognizer::kDoubleMod) {
1773 const Register right = locs()->in(1).reg(); 1776 const Register right = locs()->in(1).reg();
1774 __ DMod(result, left, right); 1777 __ DMod(result, left, right);
1775 } else if (recognized_kind() == MethodRecognizer::kMathSin) { 1778 } else if (recognized_kind() == MethodRecognizer::kMathSin) {
1776 __ DSin(result, left); 1779 __ DSin(result, left);
(...skipping 24 matching lines...)
1801 __ Min(result, left, right); 1804 __ Min(result, left, right);
1802 } else { 1805 } else {
1803 __ Max(result, left, right); 1806 __ Max(result, left, right);
1804 } 1807 }
1805 } 1808 }
1806 } 1809 }
1807 1810
1808 1811
1809 static Token::Kind FlipCondition(Token::Kind kind) { 1812 static Token::Kind FlipCondition(Token::Kind kind) {
1810 switch (kind) { 1813 switch (kind) {
1811 case Token::kEQ: return Token::kNE; 1814 case Token::kEQ:
1812 case Token::kNE: return Token::kEQ; 1815 return Token::kNE;
1813 case Token::kLT: return Token::kGTE; 1816 case Token::kNE:
1814 case Token::kGT: return Token::kLTE; 1817 return Token::kEQ;
1815 case Token::kLTE: return Token::kGT; 1818 case Token::kLT:
1816 case Token::kGTE: return Token::kLT; 1819 return Token::kGTE;
1820 case Token::kGT:
1821 return Token::kLTE;
1822 case Token::kLTE:
1823 return Token::kGT;
1824 case Token::kGTE:
1825 return Token::kLT;
1817 default: 1826 default:
1818 UNREACHABLE(); 1827 UNREACHABLE();
1819 return Token::kNE; 1828 return Token::kNE;
1820 } 1829 }
1821 } 1830 }
1822 1831
1823 1832
1824 static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) { 1833 static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) {
1825 switch (kind) { 1834 switch (kind) {
1826 case Token::kEQ: return Bytecode::kIfEqStrict; 1835 case Token::kEQ:
1827 case Token::kNE: return Bytecode::kIfNeStrict; 1836 return Bytecode::kIfEqStrict;
1828 case Token::kLT: return Bytecode::kIfLt; 1837 case Token::kNE:
1829 case Token::kGT: return Bytecode::kIfGt; 1838 return Bytecode::kIfNeStrict;
1830 case Token::kLTE: return Bytecode::kIfLe; 1839 case Token::kLT:
1831 case Token::kGTE: return Bytecode::kIfGe; 1840 return Bytecode::kIfLt;
1841 case Token::kGT:
1842 return Bytecode::kIfGt;
1843 case Token::kLTE:
1844 return Bytecode::kIfLe;
1845 case Token::kGTE:
1846 return Bytecode::kIfGe;
1832 default: 1847 default:
1833 UNREACHABLE(); 1848 UNREACHABLE();
1834 return Bytecode::kTrap; 1849 return Bytecode::kTrap;
1835 } 1850 }
1836 } 1851 }
1837 1852
1838 1853
1839 static Bytecode::Opcode OpcodeForDoubleCondition(Token::Kind kind) { 1854 static Bytecode::Opcode OpcodeForDoubleCondition(Token::Kind kind) {
1840 switch (kind) { 1855 switch (kind) {
1841 case Token::kEQ: return Bytecode::kIfDEq; 1856 case Token::kEQ:
1842 case Token::kNE: return Bytecode::kIfDNe; 1857 return Bytecode::kIfDEq;
1843 case Token::kLT: return Bytecode::kIfDLt; 1858 case Token::kNE:
1844 case Token::kGT: return Bytecode::kIfDGt; 1859 return Bytecode::kIfDNe;
1845 case Token::kLTE: return Bytecode::kIfDLe; 1860 case Token::kLT:
1846 case Token::kGTE: return Bytecode::kIfDGe; 1861 return Bytecode::kIfDLt;
1862 case Token::kGT:
1863 return Bytecode::kIfDGt;
1864 case Token::kLTE:
1865 return Bytecode::kIfDLe;
1866 case Token::kGTE:
1867 return Bytecode::kIfDGe;
1847 default: 1868 default:
1848 UNREACHABLE(); 1869 UNREACHABLE();
1849 return Bytecode::kTrap; 1870 return Bytecode::kTrap;
1850 } 1871 }
1851 } 1872 }
1852 1873
1853 1874
1854 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, 1875 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler,
1855 LocationSummary* locs, 1876 LocationSummary* locs,
1856 Token::Kind kind, 1877 Token::Kind kind,
(...skipping 43 matching lines...)
1900 } 1921 }
1901 1922
1902 1923
1903 EMIT_NATIVE_CODE(EqualityCompare, 2, Location::RequiresRegister()) { 1924 EMIT_NATIVE_CODE(EqualityCompare, 2, Location::RequiresRegister()) {
1904 ASSERT(compiler->is_optimizing()); 1925 ASSERT(compiler->is_optimizing());
1905 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); 1926 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE));
1906 Label is_true, is_false; 1927 Label is_true, is_false;
1907 // These labels are not used. They are arranged so that EmitComparisonCode 1928 // These labels are not used. They are arranged so that EmitComparisonCode
1908 // emits a test that executes the following instruction when the test 1929 // emits a test that executes the following instruction when the test
1909 // succeeds. 1930 // succeeds.
1910 BranchLabels labels = { &is_true, &is_false, &is_false }; 1931 BranchLabels labels = {&is_true, &is_false, &is_false};
1911 const Register result = locs()->out(0).reg(); 1932 const Register result = locs()->out(0).reg();
1912 __ LoadConstant(result, Bool::False()); 1933 __ LoadConstant(result, Bool::False());
1913 Condition true_condition = EmitComparisonCode(compiler, labels); 1934 Condition true_condition = EmitComparisonCode(compiler, labels);
1914 ASSERT(true_condition == NEXT_IS_TRUE); 1935 ASSERT(true_condition == NEXT_IS_TRUE);
1915 __ LoadConstant(result, Bool::True()); 1936 __ LoadConstant(result, Bool::True());
1916 } 1937 }
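The unused-labels comment in EqualityCompare above describes a branch-free boolean materialization: preload False into the result, then arrange the test so that the overwriting LoadConstant(True) is the "following instruction" that only runs when the comparison succeeds. In sketch form, with the conditional skip modeled as an if:

#include <cstdio>

int main() {
  bool comparison_hit = false;   // what EmitComparisonCode's test found
  const char* result = "False";  // __ LoadConstant(result, Bool::False())
  if (comparison_hit) {          // test executes the next op only on a hit
    result = "True";             // __ LoadConstant(result, Bool::True())
  }
  printf("result = %s\n", result);
  return 0;
}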
1917 1938
1918 1939
1919 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, 1940 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
1920 BranchInstr* branch) { 1941 BranchInstr* branch) {
(...skipping 11 matching lines...)
1932 } else { 1953 } else {
1933 ASSERT(operation_cid() == kDoubleCid); 1954 ASSERT(operation_cid() == kDoubleCid);
1934 return EmitDoubleComparisonOp(compiler, locs(), kind()); 1955 return EmitDoubleComparisonOp(compiler, locs(), kind());
1935 } 1956 }
1936 } 1957 }
1937 1958
1938 1959
1939 EMIT_NATIVE_CODE(RelationalOp, 2, Location::RequiresRegister()) { 1960 EMIT_NATIVE_CODE(RelationalOp, 2, Location::RequiresRegister()) {
1940 ASSERT(compiler->is_optimizing()); 1961 ASSERT(compiler->is_optimizing());
1941 Label is_true, is_false; 1962 Label is_true, is_false;
1942 BranchLabels labels = { &is_true, &is_false, &is_false }; 1963 BranchLabels labels = {&is_true, &is_false, &is_false};
1943 const Register result = locs()->out(0).reg(); 1964 const Register result = locs()->out(0).reg();
1944 __ LoadConstant(result, Bool::False()); 1965 __ LoadConstant(result, Bool::False());
1945 Condition true_condition = EmitComparisonCode(compiler, labels); 1966 Condition true_condition = EmitComparisonCode(compiler, labels);
1946 ASSERT(true_condition == NEXT_IS_TRUE); 1967 ASSERT(true_condition == NEXT_IS_TRUE);
1947 __ LoadConstant(result, Bool::True()); 1968 __ LoadConstant(result, Bool::True());
1948 } 1969 }
1949 1970
1950 1971
1951 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, 1972 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler,
1952 BranchInstr* branch) { 1973 BranchInstr* branch) {
1953 BranchLabels labels = compiler->CreateBranchLabels(branch); 1974 BranchLabels labels = compiler->CreateBranchLabels(branch);
1954 Condition true_condition = EmitComparisonCode(compiler, labels); 1975 Condition true_condition = EmitComparisonCode(compiler, labels);
1955 EmitBranchOnCondition(compiler, true_condition, labels); 1976 EmitBranchOnCondition(compiler, true_condition, labels);
1956 } 1977 }
1957 1978
1958 1979
1959 EMIT_NATIVE_CODE(CheckArrayBound, 2) { 1980 EMIT_NATIVE_CODE(CheckArrayBound, 2) {
1960 const Register length = locs()->in(kLengthPos).reg(); 1981 const Register length = locs()->in(kLengthPos).reg();
1961 const Register index = locs()->in(kIndexPos).reg(); 1982 const Register index = locs()->in(kIndexPos).reg();
1962 const intptr_t index_cid = this->index()->Type()->ToCid(); 1983 const intptr_t index_cid = this->index()->Type()->ToCid();
1963 if (index_cid != kSmiCid) { 1984 if (index_cid != kSmiCid) {
1964 __ CheckSmi(index); 1985 __ CheckSmi(index);
1965 compiler->EmitDeopt(deopt_id(), 1986 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound,
1966 ICData::kDeoptCheckArrayBound,
1967 (generalized_ ? ICData::kGeneralized : 0) | 1987 (generalized_ ? ICData::kGeneralized : 0) |
1968 (licm_hoisted_ ? ICData::kHoisted : 0)); 1988 (licm_hoisted_ ? ICData::kHoisted : 0));
1969 } 1989 }
1970 __ IfULe(length, index); 1990 __ IfULe(length, index);
1971 compiler->EmitDeopt(deopt_id(), 1991 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound,
1972 ICData::kDeoptCheckArrayBound,
1973 (generalized_ ? ICData::kGeneralized : 0) | 1992 (generalized_ ? ICData::kGeneralized : 0) |
1974 (licm_hoisted_ ? ICData::kHoisted : 0)); 1993 (licm_hoisted_ ? ICData::kHoisted : 0));
1975 } 1994 }
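CheckArrayBound's IfULe(length, index) is the classic single unsigned comparison that subsumes the negative-index check: a negative index reinterpreted as unsigned becomes a huge value, so one test catches both failure modes. A self-contained demonstration, assuming IfULe is an unsigned <= whose success triggers the deopt that follows, as the usage above suggests:

#include <cstdint>
#include <cstdio>

static bool BoundCheckFails(intptr_t length, intptr_t index) {
  // Unsigned length <= index catches both index >= length and index < 0.
  return (uintptr_t)length <= (uintptr_t)index;
}

int main() {
  printf("%d\n", BoundCheckFails(10, 3));   // 0: in bounds
  printf("%d\n", BoundCheckFails(10, 10));  // 1: off the end -> deopt
  printf("%d\n", BoundCheckFails(10, -1));  // 1: negative -> deopt
  return 0;
}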
1976 1995
1977 } // namespace dart 1996 } // namespace dart
1978 1997
1979 #endif // defined TARGET_ARCH_DBC 1998 #endif // defined TARGET_ARCH_DBC