OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 91 matching lines...)
102 return NULL; | 102 return NULL; |
103 } | 103 } |
104 | 104 |
105 | 105 |
106 void IfThenElseInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 106 void IfThenElseInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
107 UNIMPLEMENTED(); | 107 UNIMPLEMENTED(); |
108 } | 108 } |
109 | 109 |
110 | 110 |
111 LocationSummary* ClosureCallInstr::MakeLocationSummary(bool opt) const { | 111 LocationSummary* ClosureCallInstr::MakeLocationSummary(bool opt) const { |
112 UNIMPLEMENTED(); | 112 return MakeCallSummary(); |
113 return NULL; | |
114 } | 113 } |
115 | 114 |
116 | 115 |
117 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 116 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
118 UNIMPLEMENTED(); | 117 // Load closure object (first argument) in R1. |
| 118 int argument_count = ArgumentCount(); |
| 119 __ LoadFromOffset(R1, SP, (argument_count - 1) * kWordSize); |
| 120 |
| 121 // Load the arguments descriptor into R4. |
| 122 const Array& arguments_descriptor = |
| 123 Array::ZoneHandle(ArgumentsDescriptor::New(argument_count, |
| 124 argument_names())); |
| 125 __ LoadObject(R4, arguments_descriptor, PP); |
| 126 |
| 127 // Load the closure function into R0. |
| 128 __ LoadFieldFromOffset(R0, R1, Closure::function_offset()); |
| 129 |
| 130 // Load the closure context into CTX; note that CTX has already been preserved. |
| 131 __ LoadFieldFromOffset(CTX, R1, Closure::context_offset()); |
| 132 |
| 133 // R4: Arguments descriptor. |
| 134 // R0: Function. |
| 135 __ LoadFieldFromOffset(R2, R0, Function::code_offset()); |
| 136 |
| 137 // R2: code. |
| 138 // R5: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value). |
| 139 __ LoadImmediate(R5, 0, PP); |
| 140 __ LoadFieldFromOffset(R2, R2, Code::instructions_offset()); |
| 141 __ AddImmediate(R2, R2, Instructions::HeaderSize() - kHeapObjectTag, PP); |
| 142 __ blr(R2); |
| 143 compiler->AddCurrentDescriptor(PcDescriptors::kClosureCall, |
| 144 deopt_id(), |
| 145 token_pos()); |
| 146 compiler->RecordSafepoint(locs()); |
| 147 // Marks either the continuation point in unoptimized code or the |
| 148 // deoptimization point in optimized code, after the call. |
| 149 const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id()); |
| 150 if (compiler->is_optimizing()) { |
| 151 compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos()); |
| 152 } else { |
| 153 // Add deoptimization continuation point after the call and before the |
| 154 // arguments are removed. |
| 155 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
| 156 deopt_id_after, |
| 157 token_pos()); |
| 158 } |
| 159 __ Drop(argument_count); |
119 } | 160 } |
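A quick sanity check on the SP offset used above: arguments are pushed in order, so with argument_count arguments on the stack the closure (the first argument) sits farthest from SP. A minimal sketch of the arithmetic, assuming word-sized argument slots (the helper name is illustrative):

    #include <cstdint>

    // Byte offset from SP at which the closure (first pushed argument) sits.
    // Mirrors the LoadFromOffset(R1, SP, ...) above.
    intptr_t ClosureOffsetFromSP(intptr_t argument_count, intptr_t word_size) {
      return (argument_count - 1) * word_size;
    }

For example, a call f(closure, a, b) has argument_count == 3 and loads the closure from SP + 16 with 8-byte words.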
120 | 161 |
121 | 162 |
122 LocationSummary* LoadLocalInstr::MakeLocationSummary(bool opt) const { | 163 LocationSummary* LoadLocalInstr::MakeLocationSummary(bool opt) const { |
123 return LocationSummary::Make(0, | 164 return LocationSummary::Make(0, |
124 Location::RequiresRegister(), | 165 Location::RequiresRegister(), |
125 LocationSummary::kNoCall); | 166 LocationSummary::kNoCall); |
126 } | 167 } |
127 | 168 |
128 | 169 |
(...skipping 28 matching lines...)
157 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 198 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
158 // The register allocator drops constant definitions that have no uses. | 199 // The register allocator drops constant definitions that have no uses. |
159 if (!locs()->out(0).IsInvalid()) { | 200 if (!locs()->out(0).IsInvalid()) { |
160 Register result = locs()->out(0).reg(); | 201 Register result = locs()->out(0).reg(); |
161 __ LoadObject(result, value(), PP); | 202 __ LoadObject(result, value(), PP); |
162 } | 203 } |
163 } | 204 } |
164 | 205 |
165 | 206 |
166 LocationSummary* AssertAssignableInstr::MakeLocationSummary(bool opt) const { | 207 LocationSummary* AssertAssignableInstr::MakeLocationSummary(bool opt) const { |
167 UNIMPLEMENTED(); | 208 const intptr_t kNumInputs = 3; |
168 return NULL; | 209 const intptr_t kNumTemps = 0; |
| 210 LocationSummary* summary = |
| 211 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 212 summary->set_in(0, Location::RegisterLocation(R0)); // Value. |
| 213 summary->set_in(1, Location::RegisterLocation(R2)); // Instantiator. |
| 214 summary->set_in(2, Location::RegisterLocation(R1)); // Type arguments. |
| 215 summary->set_out(0, Location::RegisterLocation(R0)); |
| 216 return summary; |
169 } | 217 } |
170 | 218 |
171 | 219 |
172 LocationSummary* AssertBooleanInstr::MakeLocationSummary(bool opt) const { | 220 LocationSummary* AssertBooleanInstr::MakeLocationSummary(bool opt) const { |
173 UNIMPLEMENTED(); | 221 UNIMPLEMENTED(); |
174 return NULL; | 222 return NULL; |
175 } | 223 } |
176 | 224 |
177 | 225 |
178 void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 226 void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
(...skipping 179 matching lines...)
358 return NULL; | 406 return NULL; |
359 } | 407 } |
360 | 408 |
361 | 409 |
362 void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 410 void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
363 UNIMPLEMENTED(); | 411 UNIMPLEMENTED(); |
364 } | 412 } |
365 | 413 |
366 | 414 |
367 LocationSummary* StringInterpolateInstr::MakeLocationSummary(bool opt) const { | 415 LocationSummary* StringInterpolateInstr::MakeLocationSummary(bool opt) const { |
368 UNIMPLEMENTED(); | 416 const intptr_t kNumInputs = 1; |
369 return NULL; | 417 const intptr_t kNumTemps = 0; |
| 418 LocationSummary* summary = |
| 419 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 420 summary->set_in(0, Location::RegisterLocation(R0)); |
| 421 summary->set_out(0, Location::RegisterLocation(R0)); |
| 422 return summary; |
370 } | 423 } |
371 | 424 |
372 | 425 |
373 void StringInterpolateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 426 void StringInterpolateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
374 UNIMPLEMENTED(); | 427 Register array = locs()->in(0).reg(); |
| 428 __ Push(array); |
| 429 const int kNumberOfArguments = 1; |
| 430 const Array& kNoArgumentNames = Object::null_array(); |
| 431 compiler->GenerateStaticCall(deopt_id(), |
| 432 token_pos(), |
| 433 CallFunction(), |
| 434 kNumberOfArguments, |
| 435 kNoArgumentNames, |
| 436 locs()); |
| 437 ASSERT(locs()->out(0).reg() == R0); |
375 } | 438 } |
376 | 439 |
377 | 440 |
378 LocationSummary* LoadUntaggedInstr::MakeLocationSummary(bool opt) const { | 441 LocationSummary* LoadUntaggedInstr::MakeLocationSummary(bool opt) const { |
379 UNIMPLEMENTED(); | 442 UNIMPLEMENTED(); |
380 return NULL; | 443 return NULL; |
381 } | 444 } |
382 | 445 |
383 | 446 |
384 void LoadUntaggedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 447 void LoadUntaggedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
385 UNIMPLEMENTED(); | 448 UNIMPLEMENTED(); |
386 } | 449 } |
387 | 450 |
388 | 451 |
389 LocationSummary* LoadClassIdInstr::MakeLocationSummary(bool opt) const { | 452 LocationSummary* LoadClassIdInstr::MakeLocationSummary(bool opt) const { |
390 UNIMPLEMENTED(); | 453 const intptr_t kNumInputs = 1; |
391 return NULL; | 454 return LocationSummary::Make(kNumInputs, |
| 455 Location::RequiresRegister(), |
| 456 LocationSummary::kNoCall); |
392 } | 457 } |
393 | 458 |
394 | 459 |
395 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 460 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
396 UNIMPLEMENTED(); | 461 Register object = locs()->in(0).reg(); |
| 462 Register result = locs()->out(0).reg(); |
| 463 Label load, done; |
| 464 __ tsti(object, kSmiTagMask); |
| 465 __ b(&load, NE); |
| 466 __ LoadImmediate(result, Smi::RawValue(kSmiCid), PP); |
| 467 __ b(&done); |
| 468 __ Bind(&load); |
| 469 __ LoadClassId(result, object); |
| 470 __ SmiTag(result); |
| 471 __ Bind(&done); |
397 } | 472 } |
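The sequence above is the standard tagged-pointer class-id dispatch: smis carry no object header, so their cid is materialized inline (already smi-tagged via Smi::RawValue), while heap objects read the cid from the class header and tag it afterwards. A rough, self-contained C++ model, where HeaderClassId stands in for the real header read and the constant values are illustrative:

    #include <cstdint>

    constexpr intptr_t kSmiCid = 1;         // Illustrative value only.
    constexpr uintptr_t kSmiTagMask = 0x1;  // Heap pointers have the low bit set.

    intptr_t HeaderClassId(uintptr_t heap_object);  // Stand-in, not the VM API.

    // What the emitted code computes: a smi-tagged class id for any value.
    intptr_t TaggedClassId(uintptr_t object) {
      intptr_t cid = ((object & kSmiTagMask) == 0) ? kSmiCid
                                                   : HeaderClassId(object);
      return cid << 1;  // SmiTag; kSmiTagShift == 1 on this target.
    }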
398 | 473 |
399 | 474 |
400 CompileType LoadIndexedInstr::ComputeType() const { | 475 CompileType LoadIndexedInstr::ComputeType() const { |
401 UNIMPLEMENTED(); | 476 UNIMPLEMENTED(); |
402 return CompileType::Dynamic(); | 477 return CompileType::Dynamic(); |
403 } | 478 } |
404 | 479 |
405 | 480 |
406 Representation LoadIndexedInstr::representation() const { | 481 Representation LoadIndexedInstr::representation() const { |
407 UNIMPLEMENTED(); | 482 UNIMPLEMENTED(); |
408 return kTagged; | 483 return kTagged; |
409 } | 484 } |
410 | 485 |
411 | 486 |
412 LocationSummary* LoadIndexedInstr::MakeLocationSummary(bool opt) const { | 487 LocationSummary* LoadIndexedInstr::MakeLocationSummary(bool opt) const { |
413 UNIMPLEMENTED(); | 488 UNIMPLEMENTED(); |
414 return NULL; | 489 return NULL; |
415 } | 490 } |
416 | 491 |
417 | 492 |
418 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 493 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
419 UNIMPLEMENTED(); | 494 UNIMPLEMENTED(); |
420 } | 495 } |
421 | 496 |
422 | 497 |
423 Representation StoreIndexedInstr::RequiredInputRepresentation( | 498 Representation StoreIndexedInstr::RequiredInputRepresentation( |
424 intptr_t idx) const { | 499 intptr_t idx) const { |
425 UNIMPLEMENTED(); | 500 // Array can be a Dart object or a pointer to external data. |
426 return kTagged; | 501 if (idx == 0) return kNoRepresentation; // Flexible input representation. |
| 502 if (idx == 1) return kTagged; // Index is a smi. |
| 503 ASSERT(idx == 2); |
| 504 switch (class_id_) { |
| 505 case kArrayCid: |
| 506 case kOneByteStringCid: |
| 507 case kTypedDataInt8ArrayCid: |
| 508 case kTypedDataUint8ArrayCid: |
| 509 case kExternalTypedDataUint8ArrayCid: |
| 510 case kTypedDataUint8ClampedArrayCid: |
| 511 case kExternalTypedDataUint8ClampedArrayCid: |
| 512 case kTypedDataInt16ArrayCid: |
| 513 case kTypedDataUint16ArrayCid: |
| 514 return kTagged; |
| 515 case kTypedDataInt32ArrayCid: |
| 516 case kTypedDataUint32ArrayCid: |
| 517 return value()->IsSmiValue() ? kTagged : kUnboxedMint; |
| 518 case kTypedDataFloat32ArrayCid: |
| 519 case kTypedDataFloat64ArrayCid: |
| 520 return kUnboxedDouble; |
| 521 case kTypedDataFloat32x4ArrayCid: |
| 522 return kUnboxedFloat32x4; |
| 523 case kTypedDataInt32x4ArrayCid: |
| 524 return kUnboxedInt32x4; |
| 525 case kTypedDataFloat64x2ArrayCid: |
| 526 return kUnboxedFloat64x2; |
| 527 default: |
| 528 UNREACHABLE(); |
| 529 return kTagged; |
| 530 } |
427 } | 531 } |
428 | 532 |
429 | 533 |
430 LocationSummary* StoreIndexedInstr::MakeLocationSummary(bool opt) const { | 534 LocationSummary* StoreIndexedInstr::MakeLocationSummary(bool opt) const { |
431 UNIMPLEMENTED(); | 535 const intptr_t kNumInputs = 3; |
432 return NULL; | 536 const intptr_t kNumTemps = 0; |
| 537 LocationSummary* locs = |
| 538 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 539 locs->set_in(0, Location::RequiresRegister()); |
| 540 // The smi index is either untagged (element size == 1), or it is left smi |
| 541 // tagged (for all element sizes > 1). |
| 542 // TODO(regis): Revisit and see if the index can be immediate. |
| 543 locs->set_in(1, Location::WritableRegister()); |
| 544 switch (class_id()) { |
| 545 case kArrayCid: |
| 546 locs->set_in(2, ShouldEmitStoreBarrier() |
| 547 ? Location::WritableRegister() |
| 548 : Location::RegisterOrConstant(value())); |
| 549 break; |
| 550 case kExternalTypedDataUint8ArrayCid: |
| 551 case kExternalTypedDataUint8ClampedArrayCid: |
| 552 case kTypedDataInt8ArrayCid: |
| 553 case kTypedDataUint8ArrayCid: |
| 554 case kTypedDataUint8ClampedArrayCid: |
| 555 case kOneByteStringCid: |
| 556 case kTypedDataInt16ArrayCid: |
| 557 case kTypedDataUint16ArrayCid: |
| 558 locs->set_in(2, Location::WritableRegister()); |
| 559 break; |
| 560 case kTypedDataInt32ArrayCid: |
| 561 case kTypedDataUint32ArrayCid: |
| 562 // Mints are stored in Q registers. For smis, use a writable register |
| 563 // because the value must be untagged before storing. |
| 564 if (value()->IsSmiValue()) { |
| 565 locs->set_in(2, Location::WritableRegister()); |
| 566 } else { |
| 567 // TODO(zra): Implement when we add fpu loads and stores. |
| 568 UNIMPLEMENTED(); |
| 569 } |
| 570 break; |
| 571 case kTypedDataFloat32ArrayCid: |
| 572 case kTypedDataFloat64ArrayCid: // TODO(srdjan): Support Float64 constants. |
| 573 case kTypedDataInt32x4ArrayCid: |
| 574 case kTypedDataFloat32x4ArrayCid: |
| 575 case kTypedDataFloat64x2ArrayCid: |
| 576 // TODO(zra): Implement when we add fpu loads and stores. |
| 577 UNIMPLEMENTED(); |
| 578 break; |
| 579 default: |
| 580 UNREACHABLE(); |
| 581 return NULL; |
| 582 } |
| 583 return locs; |
433 } | 584 } |
434 | 585 |
435 | 586 |
436 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 587 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
437 UNIMPLEMENTED(); | 588 Register array = locs()->in(0).reg(); |
| 589 Location index = locs()->in(1); |
| 590 |
| 591 Address element_address(kNoRegister, 0); |
| 592 ASSERT(index.IsRegister()); // TODO(regis): Revisit. |
| 593 // Note that the index is expected to be smi-tagged (i.e., times 2) for |
| 594 // all arrays with index scale factor > 1. E.g., for Uint8Array and |
| 595 // OneByteString the index is expected to be untagged before accessing. |
| 596 ASSERT(kSmiTagShift == 1); |
| 597 switch (index_scale()) { |
| 598 case 1: { |
| 599 __ SmiUntag(index.reg()); |
| 600 break; |
| 601 } |
| 602 case 2: { |
| 603 break; |
| 604 } |
| 605 case 4: { |
| 606 __ Lsl(index.reg(), index.reg(), 1); |
| 607 break; |
| 608 } |
| 609 case 8: { |
| 610 __ Lsl(index.reg(), index.reg(), 2); |
| 611 break; |
| 612 } |
| 613 case 16: { |
| 614 __ Lsl(index.reg(), index.reg(), 3); |
| 615 break; |
| 616 } |
| 617 default: |
| 618 UNREACHABLE(); |
| 619 } |
| 620 if (!IsExternal()) { |
| 621 ASSERT(this->array()->definition()->representation() == kTagged); |
| 622 __ AddImmediate(index.reg(), index.reg(), |
| 623 FlowGraphCompiler::DataOffsetFor(class_id()) - kHeapObjectTag, PP); |
| 624 } |
| 625 element_address = Address(array, index.reg(), UXTX, Address::Unscaled); |
| 626 |
| 627 switch (class_id()) { |
| 628 case kArrayCid: |
| 629 if (ShouldEmitStoreBarrier()) { |
| 630 Register value = locs()->in(2).reg(); |
| 631 __ StoreIntoObject(array, element_address, value); |
| 632 } else if (locs()->in(2).IsConstant()) { |
| 633 const Object& constant = locs()->in(2).constant(); |
| 634 __ StoreIntoObjectNoBarrier(array, element_address, constant); |
| 635 } else { |
| 636 Register value = locs()->in(2).reg(); |
| 637 __ StoreIntoObjectNoBarrier(array, element_address, value); |
| 638 } |
| 639 break; |
| 640 case kTypedDataInt8ArrayCid: |
| 641 case kTypedDataUint8ArrayCid: |
| 642 case kExternalTypedDataUint8ArrayCid: |
| 643 case kOneByteStringCid: { |
| 644 if (locs()->in(2).IsConstant()) { |
| 645 const Smi& constant = Smi::Cast(locs()->in(2).constant()); |
| 646 __ LoadImmediate(TMP, static_cast<int8_t>(constant.Value()), PP); |
| 647 __ str(TMP, element_address, kUnsignedByte); |
| 648 } else { |
| 649 Register value = locs()->in(2).reg(); |
| 650 __ SmiUntag(value); |
| 651 __ str(value, element_address, kUnsignedByte); |
| 652 } |
| 653 break; |
| 654 } |
| 655 case kTypedDataUint8ClampedArrayCid: |
| 656 case kExternalTypedDataUint8ClampedArrayCid: { |
| 657 if (locs()->in(2).IsConstant()) { |
| 658 const Smi& constant = Smi::Cast(locs()->in(2).constant()); |
| 659 intptr_t value = constant.Value(); |
| 660 // Clamp to 0x00 or 0xFF, respectively. |
| 661 if (value > 0xFF) { |
| 662 value = 0xFF; |
| 663 } else if (value < 0) { |
| 664 value = 0; |
| 665 } |
| 666 __ LoadImmediate(TMP, static_cast<int8_t>(value), PP); |
| 667 __ str(TMP, element_address, kUnsignedByte); |
| 668 } else { |
| 669 Register value = locs()->in(2).reg(); |
| 670 Label store_value; |
| 671 __ SmiUntag(value); |
| 672 __ CompareImmediate(value, 0xFF, PP); |
| 673 // Clamp to 0x00 or 0xFF, respectively. |
| 674 __ b(&store_value, LS); |
| 675 __ LoadImmediate(TMP, 0x00, PP); |
| 676 __ LoadImmediate(TMP2, 0xFF, PP); |
| 677 __ csel(value, TMP, value, LE); |
| 678 __ csel(value, TMP2, value, GT); |
| 679 __ Bind(&store_value); |
| 680 __ str(value, element_address, kUnsignedByte); |
| 681 } |
| 682 break; |
| 683 } |
| 684 case kTypedDataInt16ArrayCid: |
| 685 case kTypedDataUint16ArrayCid: { |
| 686 Register value = locs()->in(2).reg(); |
| 687 __ SmiUntag(value); |
| 688 __ str(value, element_address, kUnsignedHalfword); |
| 689 break; |
| 690 } |
| 691 case kTypedDataInt32ArrayCid: |
| 692 case kTypedDataUint32ArrayCid: { |
| 693 if (value()->IsSmiValue()) { |
| 694 ASSERT(RequiredInputRepresentation(2) == kTagged); |
| 695 Register value = locs()->in(2).reg(); |
| 696 __ SmiUntag(value); |
| 697 __ str(value, element_address); |
| 698 } else { |
| 699 // TODO(zra): Implement when we add fpu loads and stores. |
| 700 UNIMPLEMENTED(); |
| 701 } |
| 702 break; |
| 703 } |
| 704 case kTypedDataFloat32ArrayCid: |
| 705 case kTypedDataFloat64ArrayCid: |
| 706 case kTypedDataFloat64x2ArrayCid: |
| 707 case kTypedDataInt32x4ArrayCid: |
| 708 case kTypedDataFloat32x4ArrayCid: { |
| 709 // TODO(zra): Implement when we add fpu loads and stores. |
| 710 UNIMPLEMENTED(); |
| 711 break; |
| 712 } |
| 713 default: |
| 714 UNREACHABLE(); |
| 715 } |
| 716 } |
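Two bits of arithmetic in this function are worth spelling out. The scale handling leans on kSmiTagShift == 1: a smi-tagged index already holds index * 2, so untagging (scale 1), using it unchanged (scale 2), or shifting left by log2(scale) - 1 (scales 4 through 16) yields the element byte offset. And the CompareImmediate/csel pair in the clamped case is an ordinary byte clamp. A self-contained sketch of both:

    #include <cassert>
    #include <cstdint>

    // Byte offset for element i of size `scale`, starting from the smi-tagged
    // index (tagged == i << 1). Mirrors the SmiUntag/Lsl cases above.
    intptr_t ByteOffsetFromTaggedIndex(intptr_t tagged, intptr_t scale) {
      assert((tagged & 1) == 0);  // The smi tag bit is clear.
      switch (scale) {
        case 1:  return tagged >> 1;  // SmiUntag.
        case 2:  return tagged;       // Already index * 2.
        case 4:  return tagged << 1;  // Lsl #1.
        case 8:  return tagged << 2;  // Lsl #2.
        case 16: return tagged << 3;  // Lsl #3.
        default: assert(false); return 0;
      }
    }

    // What the CompareImmediate/csel sequence computes for Uint8Clamped
    // stores: values below 0 become 0x00, values above 0xFF become 0xFF.
    uint8_t ClampToByte(intptr_t value) {
      if (value < 0) return 0x00;
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(value);
    }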
| 717 |
| 718 |
| 719 static void LoadValueCid(FlowGraphCompiler* compiler, |
| 720 Register value_cid_reg, |
| 721 Register value_reg, |
| 722 Label* value_is_smi = NULL) { |
| 723 Label done; |
| 724 if (value_is_smi == NULL) { |
| 725 __ LoadImmediate(value_cid_reg, kSmiCid, PP); |
| 726 } |
| 727 __ tsti(value_reg, kSmiTagMask); |
| 728 if (value_is_smi == NULL) { |
| 729 __ b(&done, EQ); |
| 730 } else { |
| 731 __ b(value_is_smi, EQ); |
| 732 } |
| 733 __ LoadClassId(value_cid_reg, value_reg); |
| 734 __ Bind(&done); |
438 } | 735 } |
439 | 736 |
440 | 737 |
441 LocationSummary* GuardFieldInstr::MakeLocationSummary(bool opt) const { | 738 LocationSummary* GuardFieldInstr::MakeLocationSummary(bool opt) const { |
442 UNIMPLEMENTED(); | 739 const intptr_t kNumInputs = 1; |
443 return NULL; | 740 LocationSummary* summary = |
| 741 new LocationSummary(kNumInputs, 0, LocationSummary::kNoCall); |
| 742 summary->set_in(0, Location::RequiresRegister()); |
| 743 const bool field_has_length = field().needs_length_check(); |
| 744 summary->AddTemp(Location::RequiresRegister()); |
| 745 summary->AddTemp(Location::RequiresRegister()); |
| 746 const bool need_field_temp_reg = |
| 747 field_has_length || (field().guarded_cid() == kIllegalCid); |
| 748 if (need_field_temp_reg) { |
| 749 summary->AddTemp(Location::RequiresRegister()); |
| 750 } |
| 751 return summary; |
444 } | 752 } |
445 | 753 |
446 | 754 |
447 void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 755 void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
448 UNIMPLEMENTED(); | 756 const intptr_t field_cid = field().guarded_cid(); |
449 } | 757 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; |
| 758 const intptr_t field_length = field().guarded_list_length(); |
| 759 const bool field_has_length = field().needs_length_check(); |
| 760 const bool needs_field_temp_reg = |
| 761 field_has_length || (field().guarded_cid() == kIllegalCid); |
| 762 if (field_has_length) { |
| 763 // Currently, we should only see final fields that remember length. |
| 764 ASSERT(field().is_final()); |
| 765 } |
| 766 |
| 767 if (field_cid == kDynamicCid) { |
| 768 ASSERT(!compiler->is_optimizing()); |
| 769 return; // Nothing to emit. |
| 770 } |
| 771 |
| 772 const intptr_t value_cid = value()->Type()->ToCid(); |
| 773 |
| 774 Register value_reg = locs()->in(0).reg(); |
| 775 |
| 776 Register value_cid_reg = locs()->temp(0).reg(); |
| 777 |
| 778 Register temp_reg = locs()->temp(1).reg(); |
| 779 |
| 780 Register field_reg = needs_field_temp_reg ? |
| 781 locs()->temp(locs()->temp_count() - 1).reg() : kNoRegister; |
| 782 |
| 783 Label ok, fail_label; |
| 784 |
| 785 Label* deopt = compiler->is_optimizing() ? |
| 786 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) : NULL; |
| 787 |
| 788 Label* fail = (deopt != NULL) ? deopt : &fail_label; |
| 789 |
| 790 if (!compiler->is_optimizing() || (field_cid == kIllegalCid)) { |
| 791 if (!compiler->is_optimizing() && (field_reg == kNoRegister)) { |
| 792 // Currently we can't have different location summaries for optimized |
| 793 // and non-optimized code. So instead we manually pick up a register |
| 794 // that is known to be free because we know how the non-optimizing |
| 795 // compiler allocates registers. |
| 796 field_reg = R2; |
| 797 ASSERT((field_reg != value_reg) && (field_reg != value_cid_reg)); |
| 798 } |
| 799 |
| 800 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); |
| 801 |
| 802 FieldAddress field_cid_operand(field_reg, Field::guarded_cid_offset()); |
| 803 FieldAddress field_nullability_operand( |
| 804 field_reg, Field::is_nullable_offset()); |
| 805 FieldAddress field_length_operand( |
| 806 field_reg, Field::guarded_list_length_offset()); |
| 807 |
| 808 ASSERT(value_cid_reg != kNoRegister); |
| 809 ASSERT((value_cid_reg != value_reg) && (field_reg != value_cid_reg)); |
| 810 |
| 811 if (value_cid == kDynamicCid) { |
| 812 LoadValueCid(compiler, value_cid_reg, value_reg); |
| 813 Label skip_length_check; |
| 814 __ ldr(TMP, field_cid_operand); |
| 815 __ CompareRegisters(value_cid_reg, TMP); |
| 816 __ b(&skip_length_check, NE); |
| 817 if (field_has_length) { |
| 818 ASSERT(temp_reg != kNoRegister); |
| 819 // The field guard may have remembered the list length; check it. |
| 820 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
| 821 __ LoadFieldFromOffset(temp_reg, value_reg, Array::length_offset()); |
| 822 __ CompareImmediate(temp_reg, Smi::RawValue(field_length), PP); |
| 823 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
| 824 __ LoadFieldFromOffset( |
| 825 temp_reg, value_reg, TypedData::length_offset()); |
| 826 __ CompareImmediate(temp_reg, Smi::RawValue(field_length), PP); |
| 827 } else { |
| 828 ASSERT(field_cid == kIllegalCid); |
| 829 ASSERT(field_length == Field::kUnknownFixedLength); |
| 830 // At compile time we do not know the type of the field nor its |
| 831 // length. At execution time we may have set the class id and list |
| 832 // length, so we compare the guarded length with the list length |
| 833 // here; without this check the list length could change without |
| 834 // triggering a deoptimization. |
| 835 Label check_array, length_compared, no_fixed_length; |
| 836 // If length is negative the length guard is either disabled or |
| 837 // has not been initialized; either way it is safe to skip the |
| 838 // length check. |
| 839 __ ldr(TMP, field_length_operand); |
| 840 __ CompareImmediate(TMP, 0, PP); |
| 841 __ b(&skip_length_check, LT); |
| 842 __ CompareImmediate(value_cid_reg, kNullCid, PP); |
| 843 __ b(&no_fixed_length, EQ); |
| 844 // Check for typed data array. |
| 845 __ CompareImmediate(value_cid_reg, kTypedDataInt32x4ArrayCid, PP); |
| 846 __ b(&no_fixed_length, GT); |
| 847 __ CompareImmediate(value_cid_reg, kTypedDataInt8ArrayCid, PP); |
| 848 // Could still be a regular array. |
| 849 __ b(&check_array, LT); |
| 850 __ LoadFromOffset(temp_reg, value_reg, TypedData::length_offset()); |
| 851 __ ldr(TMP, field_length_operand); |
| 852 __ CompareRegisters(temp_reg, TMP); |
| 853 __ b(&length_compared); |
| 854 // Check for regular array. |
| 855 __ Bind(&check_array); |
| 856 __ CompareImmediate(value_cid_reg, kImmutableArrayCid, PP); |
| 857 __ b(&no_fixed_length, GT); |
| 858 __ CompareImmediate(value_cid_reg, kArrayCid, PP); |
| 859 __ b(&no_fixed_length, LT); |
| 860 __ LoadFieldFromOffset(temp_reg, value_reg, Array::length_offset()); |
| 861 __ ldr(TMP, field_length_operand); |
| 862 __ CompareRegisters(temp_reg, TMP); |
| 863 __ b(&length_compared); |
| 864 __ Bind(&no_fixed_length); |
| 865 __ b(fail); |
| 866 __ Bind(&length_compared); |
| 867 // Following branch cannot occur; fall through. |
| 868 } |
| 869 __ b(fail, NE); |
| 870 } |
| 871 __ Bind(&skip_length_check); |
| 872 __ ldr(TMP, field_nullability_operand); |
| 873 __ CompareRegisters(value_cid_reg, TMP); |
| 874 } else if (value_cid == kNullCid) { |
| 875 __ ldr(value_cid_reg, field_nullability_operand); |
| 876 __ CompareImmediate(value_cid_reg, value_cid, PP); |
| 877 } else { |
| 878 Label skip_length_check; |
| 879 __ ldr(value_cid_reg, field_cid_operand); |
| 880 __ CompareImmediate(value_cid_reg, value_cid, PP); |
| 881 __ b(&skip_length_check, NE); |
| 882 if (field_has_length) { |
| 883 ASSERT(value_cid_reg != kNoRegister); |
| 884 ASSERT(temp_reg != kNoRegister); |
| 885 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
| 886 __ LoadFieldFromOffset(temp_reg, value_reg, Array::length_offset()); |
| 887 __ CompareImmediate(temp_reg, Smi::RawValue(field_length), PP); |
| 888 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
| 889 __ LoadFieldFromOffset( |
| 890 temp_reg, value_reg, TypedData::length_offset()); |
| 891 __ CompareImmediate(temp_reg, Smi::RawValue(field_length), PP); |
| 892 } else if (field_cid != kIllegalCid) { |
| 893 ASSERT(field_cid != value_cid); |
| 894 ASSERT(field_length >= 0); |
| 895 // Field has a known class id and length. At compile time it is |
| 896 // known that the value's class id is not a fixed length list. |
| 897 __ b(fail); |
| 898 } else { |
| 899 ASSERT(field_cid == kIllegalCid); |
| 900 ASSERT(field_length == Field::kUnknownFixedLength); |
| 901 // Following jump cannot occur; fall through. |
| 902 } |
| 903 __ b(fail, NE); |
| 904 } |
| 905 // Not identical, possibly null. |
| 906 __ Bind(&skip_length_check); |
| 907 } |
| 908 __ b(&ok, EQ); |
| 909 |
| 910 __ ldr(TMP, field_cid_operand); |
| 911 __ CompareImmediate(TMP, kIllegalCid, PP); |
| 912 __ b(fail, NE); |
| 913 |
| 914 if (value_cid == kDynamicCid) { |
| 915 __ str(value_cid_reg, field_cid_operand); |
| 916 __ str(value_cid_reg, field_nullability_operand); |
| 917 if (field_has_length) { |
| 918 Label check_array, length_set, no_fixed_length; |
| 919 __ CompareImmediate(value_cid_reg, kNullCid, PP); |
| 920 __ b(&no_fixed_length, EQ); |
| 921 // Check for typed data array. |
| 922 __ CompareImmediate(value_cid_reg, kTypedDataInt32x4ArrayCid, PP); |
| 923 __ b(&no_fixed_length, GT); |
| 924 __ CompareImmediate(value_cid_reg, kTypedDataInt8ArrayCid, PP); |
| 925 // Could still be a regular array. |
| 926 __ b(&check_array, LT); |
| 927 // Destroy value_cid_reg (safe because we are finished with it). |
| 928 __ LoadFieldFromOffset( |
| 929 value_cid_reg, value_reg, TypedData::length_offset()); |
| 930 __ str(value_cid_reg, field_length_operand); |
| 931 __ b(&length_set); // Updated field length from typed data array. |
| 932 // Check for regular array. |
| 933 __ Bind(&check_array); |
| 934 __ CompareImmediate(value_cid_reg, kImmutableArrayCid, PP); |
| 935 __ b(&no_fixed_length, GT); |
| 936 __ CompareImmediate(value_cid_reg, kArrayCid, PP); |
| 937 __ b(&no_fixed_length, LT); |
| 938 // Destroy value_cid_reg (safe because we are finished with it). |
| 939 __ LoadFieldFromOffset( |
| 940 value_cid_reg, value_reg, Array::length_offset()); |
| 941 __ str(value_cid_reg, field_length_operand); |
| 942 // Updated field length from regular array. |
| 943 __ b(&length_set); |
| 944 __ Bind(&no_fixed_length); |
| 945 __ LoadImmediate(TMP, Smi::RawValue(Field::kNoFixedLength), PP); |
| 946 __ str(TMP, field_length_operand); |
| 947 __ Bind(&length_set); |
| 948 } |
| 949 } else { |
| 950 __ LoadImmediate(TMP, value_cid, PP); |
| 951 __ str(TMP, field_cid_operand); |
| 952 __ str(TMP, field_nullability_operand); |
| 953 if (field_has_length) { |
| 954 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
| 955 // Destroy value_cid_reg (safe because we are finished with it). |
| 956 __ LoadFieldFromOffset( |
| 957 value_cid_reg, value_reg, Array::length_offset()); |
| 958 __ str(value_cid_reg, field_length_operand); |
| 959 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
| 960 // Destroy value_cid_reg (safe because we are finished with it). |
| 961 __ LoadFieldFromOffset( |
| 962 value_cid_reg, value_reg, TypedData::length_offset()); |
| 963 __ str(value_cid_reg, field_length_operand); |
| 964 } else { |
| 965 __ LoadImmediate(TMP, Smi::RawValue(Field::kNoFixedLength), PP); |
| 966 __ str(TMP, field_length_operand); |
| 967 } |
| 968 } |
| 969 } |
| 970 |
| 971 if (deopt == NULL) { |
| 972 ASSERT(!compiler->is_optimizing()); |
| 973 __ b(&ok); |
| 974 __ Bind(fail); |
| 975 |
| 976 __ LoadFieldFromOffset(TMP, field_reg, Field::guarded_cid_offset()); |
| 977 __ CompareImmediate(TMP, kDynamicCid, PP); |
| 978 __ b(&ok, EQ); |
| 979 |
| 980 __ Push(field_reg); |
| 981 __ Push(value_reg); |
| 982 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); |
| 983 __ Drop(2); // Drop the field and the value. |
| 984 } |
| 985 } else { |
| 986 ASSERT(compiler->is_optimizing()); |
| 987 ASSERT(deopt != NULL); |
| 988 // The field's guarded class has been initialized and is known. |
| 989 if (field_reg != kNoRegister) { |
| 990 __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP); |
| 991 } |
| 992 if (value_cid == kDynamicCid) { |
| 993 // Field's guarded class id is fixed but value's class id is not known. |
| 994 __ tsti(value_reg, kSmiTagMask); |
| 995 |
| 996 if (field_cid != kSmiCid) { |
| 997 __ b(fail, EQ); |
| 998 __ LoadClassId(value_cid_reg, value_reg); |
| 999 __ CompareImmediate(value_cid_reg, field_cid, PP); |
| 1000 } |
| 1001 |
| 1002 if (field_has_length) { |
| 1003 __ b(fail, NE); |
| 1004 // Classes are the same; perform the guarded list length check. |
| 1005 ASSERT(field_reg != kNoRegister); |
| 1006 ASSERT(value_cid_reg != kNoRegister); |
| 1007 FieldAddress field_length_operand( |
| 1008 field_reg, Field::guarded_list_length_offset()); |
| 1009 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
| 1010 // Destroy value_cid_reg (safe because we are finished with it). |
| 1011 __ LoadFieldFromOffset( |
| 1012 value_cid_reg, value_reg, Array::length_offset()); |
| 1013 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
| 1014 // Destroy value_cid_reg (safe because we are finished with it). |
| 1015 __ LoadFieldFromOffset( |
| 1016 value_cid_reg, value_reg, TypedData::length_offset()); |
| 1017 } |
| 1018 __ ldr(TMP, field_length_operand); |
| 1019 __ CompareRegisters(value_cid_reg, TMP); |
| 1020 } |
| 1021 |
| 1022 if (field().is_nullable() && (field_cid != kNullCid)) { |
| 1023 __ b(&ok, EQ); |
| 1024 __ CompareObject(value_reg, Object::null_object(), PP); |
| 1025 } |
| 1026 __ b(fail, NE); |
| 1027 } else { |
| 1028 // Both the value's and the field's class ids are known. |
| 1029 if ((value_cid != field_cid) && (value_cid != nullability)) { |
| 1030 __ b(fail); |
| 1031 } else if (field_has_length && (value_cid == field_cid)) { |
| 1032 ASSERT(value_cid_reg != kNoRegister); |
| 1033 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
| 1034 // Destroy value_cid_reg (safe because we are finished with it). |
| 1035 __ LoadFieldFromOffset( |
| 1036 value_cid_reg, value_reg, Array::length_offset()); |
| 1037 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
| 1038 // Destroy value_cid_reg (safe because we are finished with it). |
| 1039 __ LoadFieldFromOffset( |
| 1040 value_cid_reg, value_reg, TypedData::length_offset()); |
| 1041 } |
| 1042 __ CompareImmediate(value_cid_reg, field_length, PP); |
| 1043 __ b(fail, NE); |
| 1044 } else { |
| 1045 UNREACHABLE(); |
| 1046 } |
| 1047 } |
| 1048 } |
| 1049 __ Bind(&ok); |
| 1050 } |
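For orientation in the long function above: stripped of the list-length bookkeeping and of the paths that update the guard in place, the check it emits reduces to roughly the following predicate (a simplification with illustrative sentinel values, not the full logic):

    #include <cstdint>

    constexpr intptr_t kDynamicCid = 0;  // Illustrative sentinels only.
    constexpr intptr_t kNullCid = 1;

    // Simplified model of the cid/nullability guard. On failure, unoptimized
    // code calls the runtime to update the guard; optimized code deoptimizes.
    bool GuardAccepts(intptr_t guarded_cid, bool field_is_nullable,
                      intptr_t value_cid) {
      if (guarded_cid == kDynamicCid) return true;  // Guard already disabled.
      if (value_cid == guarded_cid) return true;    // Exact class match.
      if (field_is_nullable && (value_cid == kNullCid)) return true;
      return false;
    }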
| 1051 |
| 1052 |
| 1053 class StoreInstanceFieldSlowPath : public SlowPathCode { |
| 1054 public: |
| 1055 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, |
| 1056 const Class& cls) |
| 1057 : instruction_(instruction), cls_(cls) { } |
| 1058 |
| 1059 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1060 __ Comment("StoreInstanceFieldSlowPath"); |
| 1061 __ Bind(entry_label()); |
| 1062 |
| 1063 const Code& stub = |
| 1064 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); |
| 1065 const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); |
| 1066 |
| 1067 LocationSummary* locs = instruction_->locs(); |
| 1068 locs->live_registers()->Remove(locs->out(0)); |
| 1069 |
| 1070 compiler->SaveLiveRegisters(locs); |
| 1071 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| 1072 &label, |
| 1073 PcDescriptors::kOther, |
| 1074 locs); |
| 1075 __ mov(locs->temp(0).reg(), R0); |
| 1076 compiler->RestoreLiveRegisters(locs); |
| 1077 |
| 1078 __ b(exit_label()); |
| 1079 } |
| 1080 |
| 1081 private: |
| 1082 StoreInstanceFieldInstr* instruction_; |
| 1083 const Class& cls_; |
| 1084 }; |
450 | 1085 |
451 | 1086 |
452 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { | 1087 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { |
453 UNIMPLEMENTED(); | 1088 const intptr_t kNumInputs = 2; |
454 return NULL; | 1089 const intptr_t kNumTemps = 0; |
| 1090 LocationSummary* summary = |
| 1091 new LocationSummary(kNumInputs, kNumTemps, |
| 1092 !field().IsNull() && |
| 1093 ((field().guarded_cid() == kIllegalCid) || is_initialization_) |
| 1094 ? LocationSummary::kCallOnSlowPath |
| 1095 : LocationSummary::kNoCall); |
| 1096 |
| 1097 summary->set_in(0, Location::RequiresRegister()); |
| 1098 if (IsUnboxedStore() && opt) { |
| 1099 // TODO(zra): Implement when we add fpu loads and stores. |
| 1100 UNIMPLEMENTED(); |
| 1101 } else if (IsPotentialUnboxedStore()) { |
| 1102 summary->set_in(1, ShouldEmitStoreBarrier() |
| 1103 ? Location::WritableRegister() |
| 1104 : Location::RequiresRegister()); |
| 1105 summary->AddTemp(Location::RequiresRegister()); |
| 1106 summary->AddTemp(Location::RequiresRegister()); |
| 1107 } else { |
| 1108 summary->set_in(1, ShouldEmitStoreBarrier() |
| 1109 ? Location::WritableRegister() |
| 1110 : Location::RegisterOrConstant(value())); |
| 1111 } |
| 1112 return summary; |
455 } | 1113 } |
456 | 1114 |
457 | 1115 |
458 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1116 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
459 UNIMPLEMENTED(); | 1117 Label skip_store; |
| 1118 |
| 1119 Register instance_reg = locs()->in(0).reg(); |
| 1120 |
| 1121 if (IsUnboxedStore() && compiler->is_optimizing()) { |
| 1122 UNIMPLEMENTED(); |
| 1123 } |
| 1124 |
| 1125 if (IsPotentialUnboxedStore()) { |
| 1126 const Register temp = locs()->temp(0).reg(); |
| 1127 const Register temp2 = locs()->temp(1).reg(); |
| 1128 |
| 1129 Label store_pointer; |
| 1130 Label store_double; |
| 1131 Label store_float32x4; |
| 1132 Label store_float64x2; |
| 1133 |
| 1134 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); |
| 1135 |
| 1136 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset()); |
| 1137 __ CompareImmediate(temp2, kNullCid, PP); |
| 1138 __ b(&store_pointer, EQ); |
| 1139 |
| 1140 __ LoadFromOffset( |
| 1141 temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, kUnsignedByte); |
| 1142 __ tsti(temp2, 1 << Field::kUnboxingCandidateBit); |
| 1143 __ b(&store_pointer, EQ); |
| 1144 |
| 1145 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset()); |
| 1146 __ CompareImmediate(temp2, kDoubleCid, PP); |
| 1147 __ b(&store_double, EQ); |
| 1148 |
| 1149 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset()); |
| 1150 __ CompareImmediate(temp2, kFloat32x4Cid, PP); |
| 1151 __ b(&store_float32x4, EQ); |
| 1152 |
| 1153 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset()); |
| 1154 __ CompareImmediate(temp2, kFloat64x2Cid, PP); |
| 1155 __ b(&store_float64x2, EQ); |
| 1156 |
| 1157 // Fall through. |
| 1158 __ b(&store_pointer); |
| 1159 |
| 1160 if (!compiler->is_optimizing()) { |
| 1161 locs()->live_registers()->Add(locs()->in(0)); |
| 1162 locs()->live_registers()->Add(locs()->in(1)); |
| 1163 } |
| 1164 |
| 1165 // TODO(zra): Implement these when we add fpu loads and stores. |
| 1166 { |
| 1167 __ Bind(&store_double); |
| 1168 __ hlt(0); // Unimplemented. |
| 1169 } |
| 1170 |
| 1171 { |
| 1172 __ Bind(&store_float32x4); |
| 1173 __ hlt(0); // Unimplemented. |
| 1174 } |
| 1175 |
| 1176 { |
| 1177 __ Bind(&store_float64x2); |
| 1178 __ hlt(0); // Unimplemented. |
| 1179 } |
| 1180 |
| 1181 __ Bind(&store_pointer); |
| 1182 } |
| 1183 |
| 1184 if (ShouldEmitStoreBarrier()) { |
| 1185 Register value_reg = locs()->in(1).reg(); |
| 1186 __ StoreIntoObject(instance_reg, |
| 1187 FieldAddress(instance_reg, offset_in_bytes_), |
| 1188 value_reg, |
| 1189 CanValueBeSmi()); |
| 1190 } else { |
| 1191 if (locs()->in(1).IsConstant()) { |
| 1192 __ StoreIntoObjectNoBarrier( |
| 1193 instance_reg, |
| 1194 FieldAddress(instance_reg, offset_in_bytes_), |
| 1195 locs()->in(1).constant()); |
| 1196 } else { |
| 1197 Register value_reg = locs()->in(1).reg(); |
| 1198 __ StoreIntoObjectNoBarrier(instance_reg, |
| 1199 FieldAddress(instance_reg, offset_in_bytes_), value_reg); |
| 1200 } |
| 1201 } |
| 1202 __ Bind(&skip_store); |
460 } | 1203 } |
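The StoreIntoObject / StoreIntoObjectNoBarrier split follows the standard generational write-barrier rule: only a pointer to a new-space object written into an old-space object needs a store-buffer entry; smis and constants known not to need one skip the barrier. A schematic version, where IsSmi, IsNewObject, and AddToStoreBuffer are hypothetical stand-ins rather than the real VM helpers:

    // Schematic write barrier; all three helpers are hypothetical stand-ins.
    bool IsSmi(void* value);
    bool IsNewObject(void* heap_object);
    void AddToStoreBuffer(void* container);

    void StoreIntoObjectSketch(void* container, void** slot, void* value) {
      *slot = value;
      if (!IsSmi(value) && IsNewObject(value) && !IsNewObject(container)) {
        AddToStoreBuffer(container);  // Remember the old->new pointer for GC.
      }
    }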
461 | 1204 |
462 | 1205 |
463 LocationSummary* LoadStaticFieldInstr::MakeLocationSummary(bool opt) const { | 1206 LocationSummary* LoadStaticFieldInstr::MakeLocationSummary(bool opt) const { |
464 UNIMPLEMENTED(); | 1207 const intptr_t kNumInputs = 1; |
465 return NULL; | 1208 const intptr_t kNumTemps = 0; |
466 } | 1209 LocationSummary* summary = |
467 | 1210 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
468 | 1211 summary->set_in(0, Location::RequiresRegister()); |
| 1212 summary->set_out(0, Location::RequiresRegister()); |
| 1213 return summary; |
| 1214 } |
| 1215 |
| 1216 |
| 1217 // When the parser is building an implicit static getter for optimization, |
| 1218 // it can generate a function body where deoptimization ids do not line up |
| 1219 // with the unoptimized code. |
| 1220 // |
| 1221 // This is safe only so long as LoadStaticFieldInstr cannot deoptimize. |
469 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1222 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
470 UNIMPLEMENTED(); | 1223 Register field = locs()->in(0).reg(); |
| 1224 Register result = locs()->out(0).reg(); |
| 1225 __ LoadFieldFromOffset(result, field, Field::value_offset()); |
471 } | 1226 } |
472 | 1227 |
473 | 1228 |
474 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(bool opt) const { | 1229 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(bool opt) const { |
475 UNIMPLEMENTED(); | 1230 LocationSummary* locs = new LocationSummary(1, 1, LocationSummary::kNoCall); |
476 return NULL; | 1231 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
| 1232 : Location::RequiresRegister()); |
| 1233 locs->set_temp(0, Location::RequiresRegister()); |
| 1234 return locs; |
477 } | 1235 } |
478 | 1236 |
479 | 1237 |
480 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1238 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
481 UNIMPLEMENTED(); | 1239 Register value = locs()->in(0).reg(); |
482 } | 1240 Register temp = locs()->temp(0).reg(); |
483 | 1241 |
484 | 1242 __ LoadObject(temp, field(), PP); |
| 1243 if (this->value()->NeedsStoreBuffer()) { |
| 1244 __ StoreIntoObject(temp, |
| 1245 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
| 1246 } else { |
| 1247 __ StoreIntoObjectNoBarrier( |
| 1248 temp, FieldAddress(temp, Field::value_offset()), value); |
| 1249 } |
| 1250 } |
| 1251 |
| 1252 |
485 LocationSummary* InstanceOfInstr::MakeLocationSummary(bool opt) const { | 1253 LocationSummary* InstanceOfInstr::MakeLocationSummary(bool opt) const { |
486 UNIMPLEMENTED(); | 1254 UNIMPLEMENTED(); |
487 return NULL; | 1255 return NULL; |
488 } | 1256 } |
489 | 1257 |
490 | 1258 |
491 void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1259 void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
492 UNIMPLEMENTED(); | 1260 UNIMPLEMENTED(); |
493 } | 1261 } |
494 | 1262 |
495 | 1263 |
496 LocationSummary* CreateArrayInstr::MakeLocationSummary(bool opt) const { | 1264 LocationSummary* CreateArrayInstr::MakeLocationSummary(bool opt) const { |
497 UNIMPLEMENTED(); | 1265 const intptr_t kNumInputs = 2; |
498 return NULL; | 1266 const intptr_t kNumTemps = 0; |
| 1267 LocationSummary* locs = |
| 1268 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 1269 locs->set_in(kElementTypePos, Location::RegisterLocation(R1)); |
| 1270 locs->set_in(kLengthPos, Location::RegisterLocation(R2)); |
| 1271 locs->set_out(0, Location::RegisterLocation(R0)); |
| 1272 return locs; |
499 } | 1273 } |
500 | 1274 |
501 | 1275 |
502 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1276 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
503 UNIMPLEMENTED(); | 1277 // Allocate the array. R2 = length, R1 = element type. |
| 1278 ASSERT(locs()->in(kElementTypePos).reg() == R1); |
| 1279 ASSERT(locs()->in(kLengthPos).reg() == R2); |
| 1280 compiler->GenerateCall(token_pos(), |
| 1281 &StubCode::AllocateArrayLabel(), |
| 1282 PcDescriptors::kOther, |
| 1283 locs()); |
| 1284 ASSERT(locs()->out(0).reg() == R0); |
504 } | 1285 } |
505 | 1286 |
506 | 1287 |
507 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { | 1288 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { |
508 UNIMPLEMENTED(); | 1289 const intptr_t kNumInputs = 1; |
509 return NULL; | 1290 const intptr_t kNumTemps = 0; |
| 1291 LocationSummary* locs = |
| 1292 new LocationSummary( |
| 1293 kNumInputs, kNumTemps, |
| 1294 (opt && !IsPotentialUnboxedLoad()) |
| 1295 ? LocationSummary::kNoCall |
| 1296 : LocationSummary::kCallOnSlowPath); |
| 1297 |
| 1298 locs->set_in(0, Location::RequiresRegister()); |
| 1299 |
| 1300 if (IsUnboxedLoad() && opt) { |
| 1301 // TODO(zra): Implement when we add fpu loads and stores. |
| 1302 UNIMPLEMENTED(); |
| 1303 } else if (IsPotentialUnboxedLoad()) { |
| 1304 locs->AddTemp(Location::RequiresRegister()); |
| 1305 } |
| 1306 locs->set_out(0, Location::RequiresRegister()); |
| 1307 return locs; |
510 } | 1308 } |
511 | 1309 |
512 | 1310 |
513 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1311 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
514 UNIMPLEMENTED(); | 1312 Register instance_reg = locs()->in(0).reg(); |
| 1313 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
| 1314 UNIMPLEMENTED(); |
| 1315 } |
| 1316 |
| 1317 Label done; |
| 1318 Register result_reg = locs()->out(0).reg(); |
| 1319 if (IsPotentialUnboxedLoad()) { |
| 1320 const Register temp = locs()->temp(0).reg(); |
| 1321 |
| 1322 Label load_pointer; |
| 1323 Label load_double; |
| 1324 Label load_float32x4; |
| 1325 Label load_float64x2; |
| 1326 |
| 1327 __ LoadObject(result_reg, Field::ZoneHandle(field()->raw()), PP); |
| 1328 |
| 1329 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset()); |
| 1330 FieldAddress field_nullability_operand(result_reg, |
| 1331 Field::is_nullable_offset()); |
| 1332 |
| 1333 __ ldr(temp, field_nullability_operand); |
| 1334 __ CompareImmediate(temp, kNullCid, PP); |
| 1335 __ b(&load_pointer, EQ); |
| 1336 |
| 1337 __ ldr(temp, field_cid_operand); |
| 1338 __ CompareImmediate(temp, kDoubleCid, PP); |
| 1339 __ b(&load_double, EQ); |
| 1340 |
| 1341 __ ldr(temp, field_cid_operand); |
| 1342 __ CompareImmediate(temp, kFloat32x4Cid, PP); |
| 1343 __ b(&load_float32x4, EQ); |
| 1344 |
| 1345 __ ldr(temp, field_cid_operand); |
| 1346 __ CompareImmediate(temp, kFloat64x2Cid, PP); |
| 1347 __ b(&load_float64x2, EQ); |
| 1348 |
| 1349 // Fall through. |
| 1350 __ b(&load_pointer); |
| 1351 |
| 1352 if (!compiler->is_optimizing()) { |
| 1353 locs()->live_registers()->Add(locs()->in(0)); |
| 1354 } |
| 1355 |
| 1356 // TODO(zra): Implement these when we add fpu loads and stores. |
| 1357 { |
| 1358 __ Bind(&load_double); |
| 1359 __ hlt(0); // Unimplemented. |
| 1360 } |
| 1361 |
| 1362 { |
| 1363 __ Bind(&load_float32x4); |
| 1364 __ hlt(0); // Unimplemented. |
| 1365 } |
| 1366 |
| 1367 { |
| 1368 __ Bind(&load_float64x2); |
| 1369 __ hlt(0); // Unimplemented. |
| 1370 } |
| 1371 |
| 1372 __ Bind(&load_pointer); |
| 1373 } |
| 1374 __ LoadFromOffset( |
| 1375 result_reg, instance_reg, offset_in_bytes() - kHeapObjectTag); |
| 1376 __ Bind(&done); |
515 } | 1377 } |
516 | 1378 |
517 | 1379 |
518 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { | 1380 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { |
519 UNIMPLEMENTED(); | 1381 UNIMPLEMENTED(); |
520 return NULL; | 1382 return NULL; |
521 } | 1383 } |
522 | 1384 |
523 | 1385 |
524 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1386 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
525 UNIMPLEMENTED(); | 1387 UNIMPLEMENTED(); |
526 } | 1388 } |
527 | 1389 |
528 | 1390 |
529 LocationSummary* InstantiateTypeArgumentsInstr::MakeLocationSummary( | 1391 LocationSummary* InstantiateTypeArgumentsInstr::MakeLocationSummary( |
530 bool opt) const { | 1392 bool opt) const { |
531 UNIMPLEMENTED(); | 1393 UNIMPLEMENTED(); |
532 return NULL; | 1394 return NULL; |
533 } | 1395 } |
534 | 1396 |
535 | 1397 |
536 void InstantiateTypeArgumentsInstr::EmitNativeCode( | 1398 void InstantiateTypeArgumentsInstr::EmitNativeCode( |
537 FlowGraphCompiler* compiler) { | 1399 FlowGraphCompiler* compiler) { |
538 UNIMPLEMENTED(); | 1400 UNIMPLEMENTED(); |
539 } | 1401 } |
540 | 1402 |
541 | 1403 |
542 LocationSummary* AllocateContextInstr::MakeLocationSummary(bool opt) const { | 1404 LocationSummary* AllocateContextInstr::MakeLocationSummary(bool opt) const { |
543 UNIMPLEMENTED(); | 1405 const intptr_t kNumInputs = 0; |
544 return NULL; | 1406 const intptr_t kNumTemps = 1; |
| 1407 LocationSummary* locs = |
| 1408 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 1409 locs->set_temp(0, Location::RegisterLocation(R1)); |
| 1410 locs->set_out(0, Location::RegisterLocation(R0)); |
| 1411 return locs; |
545 } | 1412 } |
546 | 1413 |
547 | 1414 |
548 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1415 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
549 UNIMPLEMENTED(); | 1416 ASSERT(locs()->temp(0).reg() == R1); |
| 1417 ASSERT(locs()->out(0).reg() == R0); |
| 1418 |
| 1419 __ LoadImmediate(R1, num_context_variables(), PP); |
| 1420 const ExternalLabel label("alloc_context", |
| 1421 StubCode::AllocateContextEntryPoint()); |
| 1422 compiler->GenerateCall(token_pos(), |
| 1423 &label, |
| 1424 PcDescriptors::kOther, |
| 1425 locs()); |
550 } | 1426 } |
551 | 1427 |
552 | 1428 |
553 LocationSummary* CloneContextInstr::MakeLocationSummary(bool opt) const { | 1429 LocationSummary* CloneContextInstr::MakeLocationSummary(bool opt) const { |
554 UNIMPLEMENTED(); | 1430 UNIMPLEMENTED(); |
555 return NULL; | 1431 return NULL; |
556 } | 1432 } |
557 | 1433 |
558 | 1434 |
559 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1435 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
560 UNIMPLEMENTED(); | 1436 UNIMPLEMENTED(); |
561 } | 1437 } |
562 | 1438 |
563 | 1439 |
564 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(bool opt) const { | 1440 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(bool opt) const { |
565 UNIMPLEMENTED(); | 1441 UNREACHABLE(); |
566 return NULL; | 1442 return NULL; |
567 } | 1443 } |
568 | 1444 |
569 | 1445 |
570 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1446 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
571 UNIMPLEMENTED(); | 1447 __ Bind(compiler->GetJumpLabel(this)); |
| 1448 compiler->AddExceptionHandler(catch_try_index(), |
| 1449 try_index(), |
| 1450 compiler->assembler()->CodeSize(), |
| 1451 catch_handler_types_, |
| 1452 needs_stacktrace()); |
| 1453 |
| 1454 // Restore the pool pointer. |
| 1455 __ LoadPoolPointer(PP); |
| 1456 |
| 1457 if (HasParallelMove()) { |
| 1458 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
| 1459 } |
| 1460 |
| 1461 // Restore SP from FP as we are coming from a throw and the code for |
| 1462 // popping arguments has not been run. |
| 1463 const intptr_t fp_sp_dist = |
| 1464 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
| 1465 ASSERT(fp_sp_dist <= 0); |
| 1466 __ AddImmediate(SP, FP, fp_sp_dist, PP); |
| 1467 |
| 1468 // Restore the stack and initialize the two exception variables: |
| 1469 // the exception and the stack trace. |
| 1470 __ StoreToOffset(kExceptionObjectReg, |
| 1471 FP, exception_var().index() * kWordSize); |
| 1472 __ StoreToOffset(kStackTraceObjectReg, |
| 1473 FP, stacktrace_var().index() * kWordSize); |
572 } | 1474 } |
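A worked example of the fp_sp_dist computation above: with an illustrative kFirstLocalSlotFromFp of -2, a StackSize() of 5 slots, and 8-byte words, the distance is (-2 + 1 - 5) * 8 = -48, so SP is re-derived 48 bytes below FP before the exception and stack trace are stored into their frame slots:

    #include <cassert>
    #include <cstdint>

    // Sketch of the SP restoration at a catch entry; the caller supplies the
    // frame-layout constants, which are illustrative here, not the real ones.
    intptr_t CatchEntrySP(intptr_t fp, intptr_t first_local_slot_from_fp,
                          intptr_t stack_size, intptr_t word_size) {
      const intptr_t fp_sp_dist =
          (first_local_slot_from_fp + 1 - stack_size) * word_size;
      assert(fp_sp_dist <= 0);  // SP never sits above FP here.
      return fp + fp_sp_dist;
    }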
573 | 1475 |
574 | 1476 |
575 LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(bool opt) const { | 1477 LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(bool opt) const { |
576 const intptr_t kNumInputs = 0; | 1478 const intptr_t kNumInputs = 0; |
577 const intptr_t kNumTemps = 1; | 1479 const intptr_t kNumTemps = 1; |
578 LocationSummary* summary = | 1480 LocationSummary* summary = |
579 new LocationSummary(kNumInputs, | 1481 new LocationSummary(kNumInputs, |
580 kNumTemps, | 1482 kNumTemps, |
581 LocationSummary::kCallOnSlowPath); | 1483 LocationSummary::kCallOnSlowPath); |
(...skipping 801 matching lines...)
1383 compiler->GenerateRuntimeCall(token_pos(), | 2285 compiler->GenerateRuntimeCall(token_pos(), |
1384 deopt_id(), | 2286 deopt_id(), |
1385 kThrowRuntimeEntry, | 2287 kThrowRuntimeEntry, |
1386 1, | 2288 1, |
1387 locs()); | 2289 locs()); |
1388 __ hlt(0); | 2290 __ hlt(0); |
1389 } | 2291 } |
1390 | 2292 |
1391 | 2293 |
1392 LocationSummary* ReThrowInstr::MakeLocationSummary(bool opt) const { | 2294 LocationSummary* ReThrowInstr::MakeLocationSummary(bool opt) const { |
1393 UNIMPLEMENTED(); | 2295 return new LocationSummary(0, 0, LocationSummary::kCall); |
1394 return NULL; | |
1395 } | 2296 } |
1396 | 2297 |
1397 | 2298 |
1398 void ReThrowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2299 void ReThrowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1399 UNIMPLEMENTED(); | 2300 compiler->SetNeedsStacktrace(catch_try_index()); |
| 2301 compiler->GenerateRuntimeCall(token_pos(), |
| 2302 deopt_id(), |
| 2303 kReThrowRuntimeEntry, |
| 2304 2, |
| 2305 locs()); |
| 2306 __ hlt(0); |
1400 } | 2307 } |
1401 | 2308 |
1402 | 2309 |
1403 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2310 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1404 if (!compiler->CanFallThroughTo(normal_entry())) { | 2311 if (!compiler->CanFallThroughTo(normal_entry())) { |
1405 __ b(compiler->GetJumpLabel(normal_entry())); | 2312 __ b(compiler->GetJumpLabel(normal_entry())); |
1406 } | 2313 } |
1407 } | 2314 } |
1408 | 2315 |
1409 | 2316 |
(...skipping 38 matching lines...)
1448 | 2355 |
1449 // We can fall through if the successor is the next block in the list. | 2356 // We can fall through if the successor is the next block in the list. |
1450 // Otherwise, we need a jump. | 2357 // Otherwise, we need a jump. |
1451 if (!compiler->CanFallThroughTo(successor())) { | 2358 if (!compiler->CanFallThroughTo(successor())) { |
1452 __ b(compiler->GetJumpLabel(successor())); | 2359 __ b(compiler->GetJumpLabel(successor())); |
1453 } | 2360 } |
1454 } | 2361 } |
1455 | 2362 |
1456 | 2363 |
1457 LocationSummary* CurrentContextInstr::MakeLocationSummary(bool opt) const { | 2364 LocationSummary* CurrentContextInstr::MakeLocationSummary(bool opt) const { |
1458 UNIMPLEMENTED(); | 2365 return LocationSummary::Make(0, |
1459 return NULL; | 2366 Location::RequiresRegister(), |
| 2367 LocationSummary::kNoCall); |
1460 } | 2368 } |
1461 | 2369 |
1462 | 2370 |
1463 void CurrentContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2371 void CurrentContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1464 UNIMPLEMENTED(); | 2372 __ mov(locs()->out(0).reg(), CTX); |
1465 } | 2373 } |
1466 | 2374 |
1467 | 2375 |
1468 static Condition NegateCondition(Condition condition) { | 2376 static Condition NegateCondition(Condition condition) { |
1469 switch (condition) { | 2377 switch (condition) { |
1470 case EQ: return NE; | 2378 case EQ: return NE; |
1471 case NE: return EQ; | 2379 case NE: return EQ; |
1472 case LT: return GE; | 2380 case LT: return GE; |
1473 case LE: return GT; | 2381 case LE: return GT; |
1474 case GT: return LE; | 2382 case GT: return LE; |
(...skipping 137 matching lines...)
1612 compiler->GenerateCall(token_pos(), | 2520 compiler->GenerateCall(token_pos(), |
1613 &label, | 2521 &label, |
1614 PcDescriptors::kOther, | 2522 PcDescriptors::kOther, |
1615 locs()); | 2523 locs()); |
1616 __ Drop(ArgumentCount()); // Discard arguments. | 2524 __ Drop(ArgumentCount()); // Discard arguments. |
1617 } | 2525 } |
1618 | 2526 |
1619 } // namespace dart | 2527 } // namespace dart |
1620 | 2528 |
1621 #endif // defined TARGET_ARCH_ARM64 | 2529 #endif // defined TARGET_ARCH_ARM64 |