Chromium Code Reviews
Unified Diff: runtime/vm/intermediate_language_dbc.cc
Issue 1992963002: Enable optimizer pipeline for DBC. (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 4 years, 7 months ago
--- a/runtime/vm/intermediate_language_dbc.cc
+++ b/runtime/vm/intermediate_language_dbc.cc
 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_DBC.
 #if defined(TARGET_ARCH_DBC)
 
 #include "vm/intermediate_language.h"
 
 #include "vm/cpu.h"
(...skipping 101 matching lines...)
   M(CheckClassId) \
   M(CheckClass) \
   M(BinarySmiOp) \
   M(TestSmi) \
   M(RelationalOp) \
   M(EqualityCompare) \
   M(LoadIndexed)
 
 // Location summaries actually are not used by the unoptimizing DBC compiler
 // because we don't allocate any registers.
-static LocationSummary* CreateLocationSummary(Zone* zone,
-                                              intptr_t num_inputs,
-                                              bool has_result) {
+static LocationSummary* CreateLocationSummary(
+    Zone* zone,
+    intptr_t num_inputs,
+    Location output = Location::NoLocation(),
+    LocationSummary::ContainsCall contains_call = LocationSummary::kNoCall) {
   const intptr_t kNumTemps = 0;
   LocationSummary* locs = new(zone) LocationSummary(
-      zone, num_inputs, kNumTemps, LocationSummary::kNoCall);
+      zone, num_inputs, kNumTemps, contains_call);
   for (intptr_t i = 0; i < num_inputs; i++) {
-    locs->set_in(i, Location::RequiresRegister());
+    locs->set_in(i, (contains_call == LocationSummary::kNoCall) ?
+        Location::RequiresRegister() : Location::RegisterLocation(i));
   }
-  if (has_result) {
-    locs->set_out(0, Location::RequiresRegister());
+  if (!output.IsInvalid()) {
+    // For instructions that call we default to returning result in R0.
+    locs->set_out(0, output);
   }
   return locs;
 }
 
 
-#define DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out) \
+#define DEFINE_MAKE_LOCATION_SUMMARY(Name, ...) \
   LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
       const { \
-    return CreateLocationSummary(zone, In, Out); \
+    return CreateLocationSummary(zone, __VA_ARGS__); \
   } \
 
-#define EMIT_NATIVE_CODE(Name, In, Out) \
-  DEFINE_MAKE_LOCATION_SUMMARY(Name, In, Out); \
+#define EMIT_NATIVE_CODE(Name, ...) \
+  DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \
   void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) \
 
 #define DEFINE_UNIMPLEMENTED_MAKE_LOCATION_SUMMARY(Name) \
   LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \
       const { \
-    UNIMPLEMENTED(); \
+    if (!opt) UNIMPLEMENTED(); \
     return NULL; \
   } \
 
 #define DEFINE_UNIMPLEMENTED_EMIT_NATIVE_CODE(Name) \
   void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) { \
     UNIMPLEMENTED(); \
   }
 
 #define DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(Name) \
   void Name##Instr::EmitBranchCode(FlowGraphCompiler*, BranchInstr*) { \
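
The variadic macros above forward everything after the instruction name straight to CreateLocationSummary, whose defaulted output and contains_call parameters absorb whatever a call site omits; that is what lets the EMIT_NATIVE_CODE sites below pass one, two, or three extra arguments. A minimal stand-alone sketch of that forwarding, with stand-in types replacing the VM's Zone and Location machinery (illustrative only, not code from this patch):

    #include <cassert>

    // Stand-ins for LocationSummary::ContainsCall and the summary itself.
    enum ContainsCall { kNoCall, kCall };

    struct Summary {
      int num_inputs;
      bool has_output;
      ContainsCall contains_call;
    };

    // Mirrors the shape of CreateLocationSummary: defaulted trailing
    // parameters accept one, two, or three arguments from call sites.
    static Summary CreateSummarySketch(int num_inputs,
                                       bool has_output = false,
                                       ContainsCall contains_call = kNoCall) {
      return Summary{num_inputs, has_output, contains_call};
    }

    // Mirrors DEFINE_MAKE_LOCATION_SUMMARY(Name, ...): __VA_ARGS__ passes
    // everything after Name through unchanged.
    #define DEFINE_SUMMARY_SKETCH(Name, ...) \
      static Summary Name##Summary() { return CreateSummarySketch(__VA_ARGS__); }

    DEFINE_SUMMARY_SKETCH(Goto, 0)                 // like EMIT_NATIVE_CODE(Goto, 0)
    DEFINE_SUMMARY_SKETCH(LoadField, 1, true)      // input and output, no call
    DEFINE_SUMMARY_SKETCH(Throw, 0, false, kCall)  // call, no output

    int main() {
      assert(GotoSummary().contains_call == kNoCall);
      assert(LoadFieldSummary().has_output);
      assert(ThrowSummary().contains_call == kCall);
      return 0;
    }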
(...skipping 12 matching lines...)
 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED)
 
 #undef DEFINE_UNIMPLEMENTED
 
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)
 
 
-DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, true);
+DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, Location::SameAsFirstInput());
 
 
-EMIT_NATIVE_CODE(AssertBoolean, 1, true) {
+EMIT_NATIVE_CODE(AssertBoolean,
+                 1, Location::SameAsFirstInput(),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+  }
   __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0);
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  deopt_id(),
                                  token_pos());
+  if (compiler->is_optimizing()) {
+    __ Drop1();
+  }
 }
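
The AssertBoolean change above establishes the pattern this patch repeats for every instruction marked LocationSummary::kCall: in optimized code the inputs live in registers rather than on the expression stack, so they are pushed explicitly before the stack-oriented bytecode, a safepoint is recorded at the call, and the stack effect is cleaned up afterwards (Drop1 here; PopLocal into the output register where there is a result, as in CreateArray and StringInterpolate below). A rough stand-in outline of that sequence, with VM calls replaced by prints (illustrative only):

    #include <cstdio>

    // The recurring emission order for kCall instructions in this file.
    static void EmitCallPatternSketch(bool is_optimizing, bool pops_result) {
      if (is_optimizing) {
        printf("Push(in0)\n");             // Move register inputs to the stack.
      }
      printf("<stack-based bytecode>\n");  // The TOS instruction itself.
      printf("RecordSafepoint(locs)\n");   // The call may reach a safepoint.
      printf("AddCurrentDescriptor(...)\n");
      if (is_optimizing) {
        if (pops_result) {
          printf("PopLocal(out0)\n");      // Result into the output register.
        } else {
          printf("Drop1()\n");             // Discard the stack slot.
        }
      }
    }

    int main() {
      EmitCallPatternSketch(/*is_optimizing=*/true, /*pops_result=*/false);
      return 0;
    }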
 
 
-LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(Zone* zone,
-    bool optimizing) const {
+LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(
+    Zone* zone, bool optimizing) const {
   return MakeCallSummary(zone);
 }
 
 
 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  UNIMPLEMENTED();
+  compiler->Bailout(ToCString());
 }
 
 
-EMIT_NATIVE_CODE(CheckStackOverflow, 0, false) {
+EMIT_NATIVE_CODE(CheckStackOverflow,
+                 0, Location::NoLocation(),
+                 LocationSummary::kCall) {
   __ CheckStack();
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                  Thread::kNoDeoptId,
                                  token_pos());
 }
 
 
-EMIT_NATIVE_CODE(PushArgument, 1, false) {
+EMIT_NATIVE_CODE(PushArgument, 1) {
   if (compiler->is_optimizing()) {
     __ Push(locs()->in(0).reg());
   }
 }
 
 
-EMIT_NATIVE_CODE(LoadLocal, 0, false) {
+EMIT_NATIVE_CODE(LoadLocal, 0) {
   ASSERT(!compiler->is_optimizing());
   ASSERT(local().index() != 0);
   __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1));
 }
 
 
-EMIT_NATIVE_CODE(StoreLocal, 0, false) {
+EMIT_NATIVE_CODE(StoreLocal, 0) {
   ASSERT(!compiler->is_optimizing());
   ASSERT(local().index() != 0);
   if (HasTemp()) {
     __ StoreLocal(
         (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
   } else {
     __ PopLocal(
         (local().index() > 0) ? (-local().index()) : (-local().index() - 1));
   }
 }
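
The operand expression repeated in LoadLocal and StoreLocal above translates the parser's variable indices into the FP-relative slot numbers the DBC interpreter addresses. Assuming the usual VM convention that parameters carry positive indices and stack locals negative ones (an assumption; the patch itself does not state it), the mapping works out as in this standalone mirror:

    #include <cassert>

    // Mirror of the operand computation in LoadLocal/StoreLocal above.
    static int FpRelativeSlot(int index) {
      return (index > 0) ? -index : -index - 1;
    }

    int main() {
      assert(FpRelativeSlot(2) == -2);   // Parameter: below the frame pointer.
      assert(FpRelativeSlot(-1) == 0);   // First local slot.
      assert(FpRelativeSlot(-2) == 1);   // Second local slot.
      return 0;
    }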
 
 
-EMIT_NATIVE_CODE(LoadClassId, 1, true) {
+EMIT_NATIVE_CODE(LoadClassId, 1, Location::RequiresRegister()) {
   if (compiler->is_optimizing()) {
     __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg());
   } else {
     __ LoadClassIdTOS();
   }
 }
 
 
-EMIT_NATIVE_CODE(Constant, 0, true) {
-  const intptr_t kidx = __ AddConstant(value());
+EMIT_NATIVE_CODE(Constant, 0, Location::RequiresRegister()) {
   if (compiler->is_optimizing()) {
-    __ LoadConstant(locs()->out(0).reg(), kidx);
+    __ LoadConstant(locs()->out(0).reg(), value());
   } else {
-    __ PushConstant(kidx);
+    __ PushConstant(value());
   }
 }
 
 
-EMIT_NATIVE_CODE(Return, 1, false) {
-  __ ReturnTOS();
+EMIT_NATIVE_CODE(Return, 1) {
+  if (compiler->is_optimizing()) {
+    __ Return(locs()->in(0).reg());
+  } else {
+    __ ReturnTOS();
+  }
 }
 
 
-EMIT_NATIVE_CODE(StoreStaticField, 1, false) {
-  const intptr_t kidx = __ AddConstant(field());
-  __ StoreStaticTOS(kidx);
+LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(
+    Zone* zone, bool opt) const {
+  const intptr_t kNumInputs = 1;
+  const intptr_t kNumTemps = 1;
+  LocationSummary* locs = new(zone) LocationSummary(
+      zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
+  for (intptr_t i = 0; i < kNumInputs; i++) {
+    locs->set_in(i, Location::RequiresRegister());
+  }
+  for (intptr_t i = 0; i < kNumTemps; i++) {
+    locs->set_temp(i, Location::RequiresRegister());
+  }
+  return locs;
 }
 
 
-EMIT_NATIVE_CODE(LoadStaticField, 1, true) {
-  const intptr_t kidx = __ AddConstant(StaticField());
-  __ PushStatic(kidx);
+void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+  if (compiler->is_optimizing()) {
+    __ LoadConstant(locs()->temp(0).reg(),
+                    Field::ZoneHandle(field().Original()));
+    __ StoreField(locs()->temp(0).reg(),
+                  Field::static_value_offset() / kWordSize,
+                  locs()->in(0).reg());
+  } else {
+    const intptr_t kidx = __ AddConstant(field());
+    __ StoreStaticTOS(kidx);
+  }
 }
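
Both branches above (and LoadField/StoreInstanceField later in the file) divide byte offsets by kWordSize because DBC's LoadField/StoreField operands index whole words rather than bytes. A trivial worked example with a hypothetical offset (kWordSize assumed to be 8 for a 64-bit build):

    #include <cassert>

    int main() {
      const int kWordSize = 8;             // Assumed 64-bit target.
      const int offset_in_bytes = 16;      // Hypothetical field offset.
      assert(offset_in_bytes % kWordSize == 0);  // Mirrors the ASSERTs nearby.
      assert(offset_in_bytes / kWordSize == 2);  // The word-index operand.
      return 0;
    }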
 
 
-EMIT_NATIVE_CODE(InitStaticField, 0, false) {
+EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) {
+  if (compiler->is_optimizing()) {
+    __ LoadField(locs()->out(0).reg(),
+                 locs()->in(0).reg(),
+                 Field::static_value_offset() / kWordSize);
+  } else {
+    const intptr_t kidx = __ AddConstant(StaticField());
+    __ PushStatic(kidx);
+  }
+}
+
+
+EMIT_NATIVE_CODE(InitStaticField, 0) {
   ASSERT(!compiler->is_optimizing());
   __ InitStaticTOS();
 }
 
 
-EMIT_NATIVE_CODE(ClosureCall, 0, false) {
+EMIT_NATIVE_CODE(ClosureCall,
+                 1,
+                 Location::RegisterLocation(0),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+  }
+
   intptr_t argument_count = ArgumentCount();
   const Array& arguments_descriptor =
       Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                  argument_names()));
   const intptr_t argdesc_kidx =
       compiler->assembler()->AddConstant(arguments_descriptor);
   __ StaticCall(argument_count, argdesc_kidx);
+  compiler->RecordAfterCall(this);
 
-  compiler->RecordSafepoint(locs());
-  // Marks either the continuation point in unoptimized code or the
-  // deoptimization point in optimized code, after call.
-  const intptr_t deopt_id_after = Thread::ToDeoptAfter(deopt_id());
   if (compiler->is_optimizing()) {
-    compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
+    __ PopLocal(locs()->out(0).reg());
   }
-  // Add deoptimization continuation point after the call and before the
-  // arguments are removed.
-  // In optimized code this descriptor is needed for exception handling.
-  compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
-                                 deopt_id_after,
-                                 token_pos());
 }
 
 
 static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                   Condition true_condition,
                                   BranchLabels labels) {
   if (labels.fall_through == labels.false_label) {
     // If the next block is the false successor, fall through to it.
     __ Jump(labels.true_label);
   } else {
     // If the next block is not the false successor, branch to it.
     __ Jump(labels.false_label);
 
     // Fall through or jump to the true successor.
     if (labels.fall_through != labels.true_label) {
       __ Jump(labels.true_label);
     }
   }
 }
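
EmitBranchOnCondition only makes sense together with the If* bytecode that EmitComparisonCode emits immediately before it. Assuming DBC's If* instructions execute the following instruction only when their condition holds and skip it otherwise (an assumption about the interpreter, consistent with the opcode selection below), the three possible label layouts yield these jump sequences; a runnable model for illustration, not VM code:

    #include <cstdio>

    struct LabelsSketch { int true_label, false_label, fall_through; };

    // Mirrors the decision logic of EmitBranchOnCondition above. Each first
    // Jump is guarded by the preceding If* bytecode, which EmitComparisonCode
    // already inverted when the false successor does not fall through.
    static void EmitJumpsSketch(LabelsSketch labels) {
      if (labels.fall_through == labels.false_label) {
        printf("  Jump L%d  ; executed when the condition holds\n",
               labels.true_label);
      } else {
        printf("  Jump L%d  ; executed when the inverted condition holds\n",
               labels.false_label);
        if (labels.fall_through != labels.true_label) {
          printf("  Jump L%d  ; unconditional\n", labels.true_label);
        }
      }
    }

    int main() {
      printf("fall through to false successor:\n");
      EmitJumpsSketch({1, 2, 2});
      printf("fall through to true successor:\n");
      EmitJumpsSketch({1, 2, 1});
      printf("no fall through:\n");
      EmitJumpsSketch({1, 2, 3});
      return 0;
    }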
 
 
 Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                                  BranchLabels labels) {
   ASSERT((kind() == Token::kNE_STRICT) ||
          (kind() == Token::kEQ_STRICT));
-  const Bytecode::Opcode eq_op = needs_number_check() ?
-      Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
-  const Bytecode::Opcode ne_op = needs_number_check() ?
-      Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
 
-  if (kind() == Token::kEQ_STRICT) {
-    __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
+  if (!compiler->is_optimizing()) {
+    const Bytecode::Opcode eq_op = needs_number_check() ?
+        Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
+    const Bytecode::Opcode ne_op = needs_number_check() ?
+        Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
+
+    if (kind() == Token::kEQ_STRICT) {
+      __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
+    } else {
+      __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
+    }
   } else {
-    __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
+    const Bytecode::Opcode eq_op = needs_number_check() ?
+        Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
+    const Bytecode::Opcode ne_op = needs_number_check() ?
+        Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
+
+    if (kind() == Token::kEQ_STRICT) {
+      __ Emit(Bytecode::Encode(
+          (labels.fall_through == labels.false_label) ? eq_op : ne_op,
+          locs()->in(0).reg(),
+          locs()->in(1).reg()));
+    } else {
+      __ Emit(Bytecode::Encode(
+          (labels.fall_through == labels.false_label) ? ne_op : eq_op,
+          locs()->in(0).reg(),
+          locs()->in(1).reg()));
+    }
   }
 
   if (needs_number_check() && token_pos().IsReal()) {
+    compiler->RecordSafepoint(locs());
     compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall,
                                    Thread::kNoDeoptId,
                                    token_pos());
   }
   return EQ;
 }
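
In the optimized path above the comparison no longer consumes the expression stack: Bytecode::Encode packs the opcode together with the two input registers into one instruction. The actual field layout belongs to the DBC assembler and is not shown in this patch, so the packing below is a made-up illustration of the idea only:

    #include <cassert>
    #include <cstdint>

    // Hypothetical packing: 8-bit opcode plus two 8-bit register operands.
    // The real Bytecode::Encode layout may differ.
    static uint32_t EncodeSketch(uint8_t op, uint8_t ra, uint8_t rb) {
      return static_cast<uint32_t>(op) |
             (static_cast<uint32_t>(ra) << 8) |
             (static_cast<uint32_t>(rb) << 16);
    }

    int main() {
      const uint32_t instr = EncodeSketch(/*op=*/0x2A, /*ra=*/3, /*rb=*/7);
      assert((instr & 0xFF) == 0x2A);       // Opcode field.
      assert(((instr >> 8) & 0xFF) == 3);   // locs()->in(0).reg()
      assert(((instr >> 16) & 0xFF) == 7);  // locs()->in(1).reg()
      return 0;
    }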
 
 
 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                         BranchInstr* branch) {
   ASSERT((kind() == Token::kEQ_STRICT) ||
          (kind() == Token::kNE_STRICT));
 
   BranchLabels labels = compiler->CreateBranchLabels(branch);
   Condition true_condition = EmitComparisonCode(compiler, labels);
   EmitBranchOnCondition(compiler, true_condition, labels);
 }
 
 
-EMIT_NATIVE_CODE(StrictCompare, 2, true) {
+EMIT_NATIVE_CODE(StrictCompare,
+                 2,
+                 Location::RequiresRegister(),
+                 needs_number_check() ? LocationSummary::kCall
+                                      : LocationSummary::kNoCall) {
   ASSERT((kind() == Token::kEQ_STRICT) ||
          (kind() == Token::kNE_STRICT));
 
   Label is_true, is_false;
   BranchLabels labels = { &is_true, &is_false, &is_false };
   Condition true_condition = EmitComparisonCode(compiler, labels);
   EmitBranchOnCondition(compiler, true_condition, labels);
   Label done;
-  __ Bind(&is_false);
-  __ PushConstant(Bool::False());
-  __ Jump(&done);
-  __ Bind(&is_true);
-  __ PushConstant(Bool::True());
-  __ Bind(&done);
+  if (compiler->is_optimizing()) {
+    const Register result = locs()->out(0).reg();
+    __ Bind(&is_false);
+    __ LoadConstant(result, Bool::False());
+    __ Jump(&done);
+    __ Bind(&is_true);
+    __ LoadConstant(result, Bool::True());
+    __ Bind(&done);
+  } else {
+    __ Bind(&is_false);
+    __ PushConstant(Bool::False());
+    __ Jump(&done);
+    __ Bind(&is_true);
+    __ PushConstant(Bool::True());
+    __ Bind(&done);
+  }
 }
 
 
 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone,
                                                   bool opt) const {
   comparison()->InitializeLocationSummary(zone, opt);
+  if (!comparison()->HasLocs()) {
+    return NULL;
+  }
   // Branches don't produce a result.
   comparison()->locs()->set_out(0, Location::NoLocation());
   return comparison()->locs();
 }
 
 
 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   comparison()->EmitBranchCode(compiler, this);
 }
 
 
-EMIT_NATIVE_CODE(Goto, 0, false) {
+EMIT_NATIVE_CODE(Goto, 0) {
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
   // We can fall through if the successor is the next block in the list.
   // Otherwise, we need a jump.
   if (!compiler->CanFallThroughTo(successor())) {
     __ Jump(compiler->GetJumpLabel(successor()));
   }
 }
 
 
-EMIT_NATIVE_CODE(CreateArray, 2, true) {
+EMIT_NATIVE_CODE(CreateArray,
+                 2, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+    __ Push(locs()->in(1).reg());
+  }
   __ CreateArrayTOS();
+  compiler->RecordSafepoint(locs());
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
 }
 
 
-EMIT_NATIVE_CODE(StoreIndexed, 3, false) {
-  ASSERT(class_id() == kArrayCid);
-  __ StoreIndexedTOS();
+EMIT_NATIVE_CODE(StoreIndexed, 3) {
+  if (compiler->is_optimizing()) {
+    if (class_id() != kArrayCid) {
+      compiler->Bailout(ToCString());
+    }
+
+    __ StoreIndexed(locs()->in(kArrayPos).reg(),
+                    locs()->in(kIndexPos).reg(),
+                    locs()->in(kValuePos).reg());
+  } else {
+    ASSERT(class_id() == kArrayCid);
+    __ StoreIndexedTOS();
+  }
 }
 
 
-EMIT_NATIVE_CODE(StringInterpolate, 0, false) {
+EMIT_NATIVE_CODE(StringInterpolate,
+                 1, Location::RegisterLocation(0),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+  }
   const intptr_t kArgumentCount = 1;
   const Array& arguments_descriptor = Array::Handle(
       ArgumentsDescriptor::New(kArgumentCount, Object::null_array()));
   __ PushConstant(CallFunction());
   const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
   __ StaticCall(kArgumentCount, argdesc_kidx);
+  compiler->RecordAfterCall(this);
+
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
 }
 
 
-EMIT_NATIVE_CODE(NativeCall, 0, false) {
+EMIT_NATIVE_CODE(NativeCall,
+                 0, Location::NoLocation(),
+                 LocationSummary::kCall) {
   SetupNative();
 
   const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());
 
   ASSERT(!link_lazily());
   const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
   const intptr_t target_kidx =
       __ object_pool_wrapper().FindImmediate(label.address());
   const intptr_t argc_tag_kidx =
       __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
   __ PushConstant(target_kidx);
   __ PushConstant(argc_tag_kidx);
   if (is_bootstrap_native()) {
     __ NativeBootstrapCall();
   } else {
     __ NativeCall();
   }
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  Thread::kNoDeoptId,
                                  token_pos());
 }
 
 
-EMIT_NATIVE_CODE(AllocateObject, 0, true) {
+EMIT_NATIVE_CODE(AllocateObject,
+                 0, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
   if (ArgumentCount() == 1) {
     __ PushConstant(cls());
     __ AllocateT();
     compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                    Thread::kNoDeoptId,
                                    token_pos());
   } else {
     const intptr_t kidx = __ AddConstant(cls());
     __ Allocate(kidx);
     compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                    Thread::kNoDeoptId,
                                    token_pos());
   }
+  compiler->RecordSafepoint(locs());
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
 }
 
 
-EMIT_NATIVE_CODE(StoreInstanceField, 2, false) {
+EMIT_NATIVE_CODE(StoreInstanceField, 2) {
   ASSERT(!HasTemp());
   ASSERT(offset_in_bytes() % kWordSize == 0);
   if (compiler->is_optimizing()) {
     const Register value = locs()->in(1).reg();
     const Register instance = locs()->in(0).reg();
     __ StoreField(instance, offset_in_bytes() / kWordSize, value);
   } else {
     __ StoreFieldTOS(offset_in_bytes() / kWordSize);
   }
 }
 
 
-EMIT_NATIVE_CODE(LoadField, 1, true) {
+EMIT_NATIVE_CODE(LoadField, 1, Location::RequiresRegister()) {
   ASSERT(offset_in_bytes() % kWordSize == 0);
-  __ LoadFieldTOS(offset_in_bytes() / kWordSize);
+  if (compiler->is_optimizing()) {
+    const Register result = locs()->out(0).reg();
+    const Register instance = locs()->in(0).reg();
+    __ LoadField(result, instance, offset_in_bytes() / kWordSize);
+  } else {
+    __ LoadFieldTOS(offset_in_bytes() / kWordSize);
+  }
 }
 
 
-EMIT_NATIVE_CODE(BooleanNegate, 1, true) {
-  __ BooleanNegateTOS();
+EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) {
+  if (compiler->is_optimizing()) {
+    __ BooleanNegate(locs()->out(0).reg(), locs()->in(0).reg());
+  } else {
+    __ BooleanNegateTOS();
+  }
 }
 
 
-EMIT_NATIVE_CODE(AllocateContext, 0, false) {
+EMIT_NATIVE_CODE(AllocateContext,
+                 0, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
+  ASSERT(!compiler->is_optimizing());
   __ AllocateContext(num_context_variables());
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  Thread::kNoDeoptId,
                                  token_pos());
 }
 
 
-EMIT_NATIVE_CODE(CloneContext, 0, false) {
+EMIT_NATIVE_CODE(CloneContext,
+                 1, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
+  ASSERT(!compiler->is_optimizing());
   __ CloneContext();
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  Thread::kNoDeoptId,
                                  token_pos());
 }
 
 
-EMIT_NATIVE_CODE(CatchBlockEntry, 0, false) {
+EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
   __ Bind(compiler->GetJumpLabel(this));
   compiler->AddExceptionHandler(catch_try_index(),
                                 try_index(),
                                 compiler->assembler()->CodeSize(),
                                 catch_handler_types_,
                                 needs_stacktrace());
   __ MoveSpecial(-exception_var().index() - 1,
                  Simulator::kExceptionSpecialIndex);
   __ MoveSpecial(-stacktrace_var().index() - 1,
                  Simulator::kStacktraceSpecialIndex);
   __ SetFrame(compiler->StackSize());
 }
 
 
-EMIT_NATIVE_CODE(Throw, 0, false) {
+EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
   __ Throw(0);
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  deopt_id(),
                                  token_pos());
   __ Trap();
 }
 
 
-EMIT_NATIVE_CODE(ReThrow, 0, false) {
+EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) {
   compiler->SetNeedsStacktrace(catch_try_index());
   __ Throw(1);
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  deopt_id(),
                                  token_pos());
   __ Trap();
 }
 
-EMIT_NATIVE_CODE(InstantiateType, 1, true) {
+EMIT_NATIVE_CODE(InstantiateType,
+                 1, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+  }
   __ InstantiateType(__ AddConstant(type()));
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  deopt_id(),
                                  token_pos());
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
 }
 
-EMIT_NATIVE_CODE(InstantiateTypeArguments, 1, true) {
+EMIT_NATIVE_CODE(InstantiateTypeArguments,
+                 1, Location::RequiresRegister(),
+                 LocationSummary::kCall) {
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());
+  }
   __ InstantiateTypeArgumentsTOS(
       type_arguments().IsRawInstantiatedRaw(type_arguments().Length()),
       __ AddConstant(type_arguments()));
+  compiler->RecordSafepoint(locs());
   compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
                                  deopt_id(),
                                  token_pos());
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
 }
 
 
 void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   __ DebugStep();
   compiler->AddCurrentDescriptor(stub_kind_, Thread::kNoDeoptId, token_pos());
 }
 
 
 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   if (!compiler->CanFallThroughTo(normal_entry())) {
     __ Jump(compiler->GetJumpLabel(normal_entry()));
   }
 }
 
 
 LocationSummary* Instruction::MakeCallSummary(Zone* zone) {
   LocationSummary* result = new(zone) LocationSummary(
       zone, 0, 0, LocationSummary::kCall);
-  result->set_out(0, Location::RequiresRegister());
+  // TODO(vegorov) support allocating out registers for calls.
+  // Currently we require them to be fixed.
+  result->set_out(0, Location::RegisterLocation(0));
   return result;
 }
 
 
 CompileType BinaryUint32OpInstr::ComputeType() const {
   return CompileType::Int();
 }
 
 
 CompileType ShiftUint32OpInstr::ComputeType() const {
(...skipping 33 matching lines...)
 
     case kTypedDataInt8ArrayCid:
     case kTypedDataUint8ArrayCid:
     case kTypedDataUint8ClampedArrayCid:
     case kExternalTypedDataUint8ArrayCid:
     case kExternalTypedDataUint8ClampedArrayCid:
     case kTypedDataInt16ArrayCid:
     case kTypedDataUint16ArrayCid:
     case kOneByteStringCid:
     case kTwoByteStringCid:
+    case kExternalOneByteStringCid:
+    case kExternalTwoByteStringCid:
       return CompileType::FromCid(kSmiCid);
 
     case kTypedDataInt32ArrayCid:
     case kTypedDataUint32ArrayCid:
       return CompileType::Int();
 
     default:
       UNREACHABLE();
       return CompileType::Dynamic();
   }
 }
 
 
 Representation LoadIndexedInstr::representation() const {
   switch (class_id_) {
     case kArrayCid:
     case kImmutableArrayCid:
     case kTypedDataInt8ArrayCid:
     case kTypedDataUint8ArrayCid:
     case kTypedDataUint8ClampedArrayCid:
     case kExternalTypedDataUint8ArrayCid:
     case kExternalTypedDataUint8ClampedArrayCid:
     case kTypedDataInt16ArrayCid:
     case kTypedDataUint16ArrayCid:
     case kOneByteStringCid:
     case kTwoByteStringCid:
+    case kExternalOneByteStringCid:
+    case kExternalTwoByteStringCid:
       return kTagged;
     case kTypedDataInt32ArrayCid:
       return kUnboxedInt32;
     case kTypedDataUint32ArrayCid:
       return kUnboxedUint32;
     case kTypedDataFloat32ArrayCid:
     case kTypedDataFloat64ArrayCid:
       return kUnboxedDouble;
     case kTypedDataInt32x4ArrayCid:
       return kUnboxedInt32x4;
(...skipping 10 matching lines...)
 
 Representation StoreIndexedInstr::RequiredInputRepresentation(
     intptr_t idx) const {
   // Array can be a Dart object or a pointer to external data.
   if (idx == 0) return kNoRepresentation;  // Flexible input representation.
   if (idx == 1) return kTagged;  // Index is a smi.
   ASSERT(idx == 2);
   switch (class_id_) {
     case kArrayCid:
     case kOneByteStringCid:
+    case kTwoByteStringCid:
+    case kExternalOneByteStringCid:
+    case kExternalTwoByteStringCid:
     case kTypedDataInt8ArrayCid:
     case kTypedDataUint8ArrayCid:
     case kExternalTypedDataUint8ArrayCid:
     case kTypedDataUint8ClampedArrayCid:
     case kExternalTypedDataUint8ClampedArrayCid:
     case kTypedDataInt16ArrayCid:
     case kTypedDataUint16ArrayCid:
       return kTagged;
     case kTypedDataInt32ArrayCid:
       return kUnboxedInt32;
     case kTypedDataUint32ArrayCid:
       return kUnboxedUint32;
     case kTypedDataFloat32ArrayCid:
     case kTypedDataFloat64ArrayCid:
       return kUnboxedDouble;
     case kTypedDataFloat32x4ArrayCid:
       return kUnboxedFloat32x4;
     case kTypedDataInt32x4ArrayCid:
       return kUnboxedInt32x4;
     case kTypedDataFloat64x2ArrayCid:
       return kUnboxedFloat64x2;
     default:
       UNREACHABLE();
       return kTagged;
   }
 }
 
+
+void Environment::DropArguments(intptr_t argc) {
+#if defined(DEBUG)
+  // Check that we are in the backend - register allocation has been run.
+  ASSERT(locations_ != NULL);
+
+  // Check that we are only dropping PushArgument instructions from the
+  // environment.
+  ASSERT(argc <= values_.length());
+  for (intptr_t i = 0; i < argc; i++) {
+    ASSERT(values_[values_.length() - i - 1]->definition()->IsPushArgument());
+  }
+#endif
+  values_.TruncateTo(values_.length() - argc);
+}
+
+
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_DBC