OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
47 LocationSummary* locs = new(zone) LocationSummary( | 47 LocationSummary* locs = new(zone) LocationSummary( |
48 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 48 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
49 locs->set_in(0, Location::AnyOrConstant(value())); | 49 locs->set_in(0, Location::AnyOrConstant(value())); |
50 return locs; | 50 return locs; |
51 } | 51 } |
52 | 52 |
53 | 53 |
54 void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 54 void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
55 // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode | 55 // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode |
56 // where PushArgument is handled by BindInstr::EmitNativeCode. | 56 // where PushArgument is handled by BindInstr::EmitNativeCode. |
57 __ TraceSimMsg("PushArgumentInstr"); | 57 __ Comment("PushArgumentInstr"); |
58 if (compiler->is_optimizing()) { | 58 if (compiler->is_optimizing()) { |
59 Location value = locs()->in(0); | 59 Location value = locs()->in(0); |
60 if (value.IsRegister()) { | 60 if (value.IsRegister()) { |
61 __ Push(value.reg()); | 61 __ Push(value.reg()); |
62 } else if (value.IsConstant()) { | 62 } else if (value.IsConstant()) { |
63 __ PushObject(value.constant()); | 63 __ PushObject(value.constant()); |
64 } else { | 64 } else { |
65 ASSERT(value.IsStackSlot()); | 65 ASSERT(value.IsStackSlot()); |
66 const intptr_t value_offset = value.ToStackSlotOffset(); | 66 const intptr_t value_offset = value.ToStackSlotOffset(); |
67 __ LoadFromOffset(TMP, FP, value_offset); | 67 __ LoadFromOffset(TMP, FP, value_offset); |
(...skipping 11 matching lines...) Expand all Loading... |
79 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 79 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
80 locs->set_in(0, Location::RegisterLocation(V0)); | 80 locs->set_in(0, Location::RegisterLocation(V0)); |
81 return locs; | 81 return locs; |
82 } | 82 } |
83 | 83 |
84 | 84 |
85 // Attempt optimized compilation at return instruction instead of at the entry. | 85 // Attempt optimized compilation at return instruction instead of at the entry. |
86 // The entry needs to be patchable, no inlined objects are allowed in the area | 86 // The entry needs to be patchable, no inlined objects are allowed in the area |
87 // that will be overwritten by the patch instructions: a branch macro sequence. | 87 // that will be overwritten by the patch instructions: a branch macro sequence. |
88 void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 88 void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
89 __ TraceSimMsg("ReturnInstr"); | 89 __ Comment("ReturnInstr"); |
90 Register result = locs()->in(0).reg(); | 90 Register result = locs()->in(0).reg(); |
91 ASSERT(result == V0); | 91 ASSERT(result == V0); |
92 | 92 |
93 if (compiler->intrinsic_mode()) { | 93 if (compiler->intrinsic_mode()) { |
94 // Intrinsics don't have a frame. | 94 // Intrinsics don't have a frame. |
95 __ Ret(); | 95 __ Ret(); |
96 return; | 96 return; |
97 } | 97 } |
98 | 98 |
99 #if defined(DEBUG) | 99 #if defined(DEBUG) |
100 Label stack_ok; | 100 Label stack_ok; |
101 __ Comment("Stack Check"); | 101 __ Comment("Stack Check"); |
102 __ TraceSimMsg("Stack Check"); | |
103 const intptr_t fp_sp_dist = | 102 const intptr_t fp_sp_dist = |
104 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 103 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
105 ASSERT(fp_sp_dist <= 0); | 104 ASSERT(fp_sp_dist <= 0); |
106 __ subu(CMPRES1, SP, FP); | 105 __ subu(CMPRES1, SP, FP); |
107 | 106 |
108 __ BranchEqual(CMPRES1, Immediate(fp_sp_dist), &stack_ok); | 107 __ BranchEqual(CMPRES1, Immediate(fp_sp_dist), &stack_ok); |
109 __ break_(0); | 108 __ break_(0); |
110 | 109 |
111 __ Bind(&stack_ok); | 110 __ Bind(&stack_ok); |
112 #endif | 111 #endif |
(...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
302 LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone, | 301 LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone, |
303 bool opt) const { | 302 bool opt) const { |
304 return LocationSummary::Make(zone, | 303 return LocationSummary::Make(zone, |
305 0, | 304 0, |
306 Location::RequiresRegister(), | 305 Location::RequiresRegister(), |
307 LocationSummary::kNoCall); | 306 LocationSummary::kNoCall); |
308 } | 307 } |
309 | 308 |
310 | 309 |
311 void LoadLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 310 void LoadLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
312 __ TraceSimMsg("LoadLocalInstr"); | 311 __ Comment("LoadLocalInstr"); |
313 Register result = locs()->out(0).reg(); | 312 Register result = locs()->out(0).reg(); |
314 __ LoadFromOffset(result, FP, local().index() * kWordSize); | 313 __ LoadFromOffset(result, FP, local().index() * kWordSize); |
315 } | 314 } |
316 | 315 |
317 | 316 |
318 LocationSummary* StoreLocalInstr::MakeLocationSummary(Zone* zone, | 317 LocationSummary* StoreLocalInstr::MakeLocationSummary(Zone* zone, |
319 bool opt) const { | 318 bool opt) const { |
320 return LocationSummary::Make(zone, | 319 return LocationSummary::Make(zone, |
321 1, | 320 1, |
322 Location::SameAsFirstInput(), | 321 Location::SameAsFirstInput(), |
323 LocationSummary::kNoCall); | 322 LocationSummary::kNoCall); |
324 } | 323 } |
325 | 324 |
326 | 325 |
327 void StoreLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 326 void StoreLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
328 __ TraceSimMsg("StoreLocalInstr"); | 327 __ Comment("StoreLocalInstr"); |
329 Register value = locs()->in(0).reg(); | 328 Register value = locs()->in(0).reg(); |
330 Register result = locs()->out(0).reg(); | 329 Register result = locs()->out(0).reg(); |
331 ASSERT(result == value); // Assert that register assignment is correct. | 330 ASSERT(result == value); // Assert that register assignment is correct. |
332 __ StoreToOffset(value, FP, local().index() * kWordSize); | 331 __ StoreToOffset(value, FP, local().index() * kWordSize); |
333 } | 332 } |
334 | 333 |
335 | 334 |
336 LocationSummary* ConstantInstr::MakeLocationSummary(Zone* zone, | 335 LocationSummary* ConstantInstr::MakeLocationSummary(Zone* zone, |
337 bool opt) const { | 336 bool opt) const { |
338 return LocationSummary::Make(zone, | 337 return LocationSummary::Make(zone, |
339 0, | 338 0, |
340 Location::RequiresRegister(), | 339 Location::RequiresRegister(), |
341 LocationSummary::kNoCall); | 340 LocationSummary::kNoCall); |
342 } | 341 } |
343 | 342 |
344 | 343 |
345 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 344 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
346 // The register allocator drops constant definitions that have no uses. | 345 // The register allocator drops constant definitions that have no uses. |
347 if (!locs()->out(0).IsInvalid()) { | 346 if (!locs()->out(0).IsInvalid()) { |
348 __ TraceSimMsg("ConstantInstr"); | 347 __ Comment("ConstantInstr"); |
349 Register result = locs()->out(0).reg(); | 348 Register result = locs()->out(0).reg(); |
350 __ LoadObject(result, value()); | 349 __ LoadObject(result, value()); |
351 } | 350 } |
352 } | 351 } |
353 | 352 |
354 | 353 |
355 LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone, | 354 LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone, |
356 bool opt) const { | 355 bool opt) const { |
357 const intptr_t kNumInputs = 0; | 356 const intptr_t kNumInputs = 0; |
358 const intptr_t kNumTemps = (representation_ == kUnboxedInt32) ? 0 : 1; | 357 const intptr_t kNumTemps = (representation_ == kUnboxedInt32) ? 0 : 1; |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
450 // We should never return here. | 449 // We should never return here. |
451 __ break_(0); | 450 __ break_(0); |
452 __ Bind(&done); | 451 __ Bind(&done); |
453 } | 452 } |
454 | 453 |
455 | 454 |
456 void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 455 void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
457 Register obj = locs()->in(0).reg(); | 456 Register obj = locs()->in(0).reg(); |
458 Register result = locs()->out(0).reg(); | 457 Register result = locs()->out(0).reg(); |
459 | 458 |
460 __ TraceSimMsg("AssertBooleanInstr"); | 459 __ Comment("AssertBooleanInstr"); |
461 EmitAssertBoolean(obj, token_pos(), deopt_id(), locs(), compiler); | 460 EmitAssertBoolean(obj, token_pos(), deopt_id(), locs(), compiler); |
462 ASSERT(obj == result); | 461 ASSERT(obj == result); |
463 } | 462 } |
464 | 463 |
465 | 464 |
466 LocationSummary* EqualityCompareInstr::MakeLocationSummary(Zone* zone, | 465 LocationSummary* EqualityCompareInstr::MakeLocationSummary(Zone* zone, |
467 bool opt) const { | 466 bool opt) const { |
468 const intptr_t kNumInputs = 2; | 467 const intptr_t kNumInputs = 2; |
469 if (operation_cid() == kMintCid) { | 468 if (operation_cid() == kMintCid) { |
470 const intptr_t kNumTemps = 0; | 469 const intptr_t kNumTemps = 0; |
(...skipping 30 matching lines...) Expand all Loading... |
501 } | 500 } |
502 UNREACHABLE(); | 501 UNREACHABLE(); |
503 return NULL; | 502 return NULL; |
504 } | 503 } |
505 | 504 |
506 | 505 |
507 static void LoadValueCid(FlowGraphCompiler* compiler, | 506 static void LoadValueCid(FlowGraphCompiler* compiler, |
508 Register value_cid_reg, | 507 Register value_cid_reg, |
509 Register value_reg, | 508 Register value_reg, |
510 Label* value_is_smi = NULL) { | 509 Label* value_is_smi = NULL) { |
511 __ TraceSimMsg("LoadValueCid"); | 510 __ Comment("LoadValueCid"); |
512 Label done; | 511 Label done; |
513 if (value_is_smi == NULL) { | 512 if (value_is_smi == NULL) { |
514 __ LoadImmediate(value_cid_reg, kSmiCid); | 513 __ LoadImmediate(value_cid_reg, kSmiCid); |
515 } | 514 } |
516 __ andi(CMPRES1, value_reg, Immediate(kSmiTagMask)); | 515 __ andi(CMPRES1, value_reg, Immediate(kSmiTagMask)); |
517 if (value_is_smi == NULL) { | 516 if (value_is_smi == NULL) { |
518 __ beq(CMPRES1, ZR, &done); | 517 __ beq(CMPRES1, ZR, &done); |
519 } else { | 518 } else { |
520 __ beq(CMPRES1, ZR, value_is_smi); | 519 __ beq(CMPRES1, ZR, value_is_smi); |
521 } | 520 } |
(...skipping 29 matching lines...) Expand all Loading... |
551 UNREACHABLE(); | 550 UNREACHABLE(); |
552 return NV; | 551 return NV; |
553 } | 552 } |
554 } | 553 } |
555 | 554 |
556 | 555 |
557 // The comparison code to emit is specified by true_condition. | 556 // The comparison code to emit is specified by true_condition. |
558 static void EmitBranchOnCondition(FlowGraphCompiler* compiler, | 557 static void EmitBranchOnCondition(FlowGraphCompiler* compiler, |
559 Condition true_condition, | 558 Condition true_condition, |
560 BranchLabels labels) { | 559 BranchLabels labels) { |
561 __ TraceSimMsg("ControlInstruction::EmitBranchOnCondition"); | 560 __ Comment("ControlInstruction::EmitBranchOnCondition"); |
562 if (labels.fall_through == labels.false_label) { | 561 if (labels.fall_through == labels.false_label) { |
563 // If the next block is the false successor, fall through to it. | 562 // If the next block is the false successor, fall through to it. |
564 __ BranchOnCondition(true_condition, labels.true_label); | 563 __ BranchOnCondition(true_condition, labels.true_label); |
565 } else { | 564 } else { |
566 // If the next block is not the false successor, branch to it. | 565 // If the next block is not the false successor, branch to it. |
567 Condition false_condition = NegateCondition(true_condition); | 566 Condition false_condition = NegateCondition(true_condition); |
568 __ BranchOnCondition(false_condition, labels.false_label); | 567 __ BranchOnCondition(false_condition, labels.false_label); |
569 // Fall through or jump to the true successor. | 568 // Fall through or jump to the true successor. |
570 if (labels.fall_through != labels.true_label) { | 569 if (labels.fall_through != labels.true_label) { |
571 __ b(labels.true_label); | 570 __ b(labels.true_label); |
572 } | 571 } |
573 } | 572 } |
574 } | 573 } |
575 | 574 |
576 | 575 |
577 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, | 576 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, |
578 const LocationSummary& locs, | 577 const LocationSummary& locs, |
579 Token::Kind kind) { | 578 Token::Kind kind) { |
580 __ TraceSimMsg("EmitSmiComparisonOp"); | |
581 __ Comment("EmitSmiComparisonOp"); | 579 __ Comment("EmitSmiComparisonOp"); |
582 const Location left = locs.in(0); | 580 const Location left = locs.in(0); |
583 const Location right = locs.in(1); | 581 const Location right = locs.in(1); |
584 ASSERT(!left.IsConstant() || !right.IsConstant()); | 582 ASSERT(!left.IsConstant() || !right.IsConstant()); |
585 ASSERT(left.IsRegister() || left.IsConstant()); | 583 ASSERT(left.IsRegister() || left.IsConstant()); |
586 ASSERT(right.IsRegister() || right.IsConstant()); | 584 ASSERT(right.IsRegister() || right.IsConstant()); |
587 | 585 |
588 int16_t imm = 0; | 586 int16_t imm = 0; |
589 const Register left_reg = left.IsRegister() ? | 587 const Register left_reg = left.IsRegister() ? |
590 left.reg() : __ LoadConditionOperand(CMPRES1, left.constant(), &imm); | 588 left.reg() : __ LoadConditionOperand(CMPRES1, left.constant(), &imm); |
591 const Register right_reg = right.IsRegister() ? | 589 const Register right_reg = right.IsRegister() ? |
592 right.reg() : __ LoadConditionOperand(CMPRES2, right.constant(), &imm); | 590 right.reg() : __ LoadConditionOperand(CMPRES2, right.constant(), &imm); |
593 return Condition(left_reg, right_reg, TokenKindToIntRelOp(kind), imm); | 591 return Condition(left_reg, right_reg, TokenKindToIntRelOp(kind), imm); |
594 } | 592 } |
595 | 593 |
596 | 594 |
597 static Condition EmitUnboxedMintEqualityOp(FlowGraphCompiler* compiler, | 595 static Condition EmitUnboxedMintEqualityOp(FlowGraphCompiler* compiler, |
598 const LocationSummary& locs, | 596 const LocationSummary& locs, |
599 Token::Kind kind, | 597 Token::Kind kind, |
600 BranchLabels labels) { | 598 BranchLabels labels) { |
601 __ TraceSimMsg("EmitUnboxedMintEqualityOp"); | |
602 __ Comment("EmitUnboxedMintEqualityOp"); | 599 __ Comment("EmitUnboxedMintEqualityOp"); |
603 ASSERT(Token::IsEqualityOperator(kind)); | 600 ASSERT(Token::IsEqualityOperator(kind)); |
604 PairLocation* left_pair = locs.in(0).AsPairLocation(); | 601 PairLocation* left_pair = locs.in(0).AsPairLocation(); |
605 Register left_lo = left_pair->At(0).reg(); | 602 Register left_lo = left_pair->At(0).reg(); |
606 Register left_hi = left_pair->At(1).reg(); | 603 Register left_hi = left_pair->At(1).reg(); |
607 PairLocation* right_pair = locs.in(1).AsPairLocation(); | 604 PairLocation* right_pair = locs.in(1).AsPairLocation(); |
608 Register right_lo = right_pair->At(0).reg(); | 605 Register right_lo = right_pair->At(0).reg(); |
609 Register right_hi = right_pair->At(1).reg(); | 606 Register right_hi = right_pair->At(1).reg(); |
610 | 607 |
611 if (labels.false_label == NULL) { | 608 if (labels.false_label == NULL) { |
(...skipping 11 matching lines...) Expand all Loading... |
623 } | 620 } |
624 return Condition(left_lo, right_lo, TokenKindToUintRelOp(kind)); | 621 return Condition(left_lo, right_lo, TokenKindToUintRelOp(kind)); |
625 } | 622 } |
626 } | 623 } |
627 | 624 |
628 | 625 |
629 static Condition EmitUnboxedMintComparisonOp(FlowGraphCompiler* compiler, | 626 static Condition EmitUnboxedMintComparisonOp(FlowGraphCompiler* compiler, |
630 const LocationSummary& locs, | 627 const LocationSummary& locs, |
631 Token::Kind kind, | 628 Token::Kind kind, |
632 BranchLabels labels) { | 629 BranchLabels labels) { |
633 __ TraceSimMsg("EmitUnboxedMintComparisonOp"); | |
634 __ Comment("EmitUnboxedMintComparisonOp"); | 630 __ Comment("EmitUnboxedMintComparisonOp"); |
635 PairLocation* left_pair = locs.in(0).AsPairLocation(); | 631 PairLocation* left_pair = locs.in(0).AsPairLocation(); |
636 Register left_lo = left_pair->At(0).reg(); | 632 Register left_lo = left_pair->At(0).reg(); |
637 Register left_hi = left_pair->At(1).reg(); | 633 Register left_hi = left_pair->At(1).reg(); |
638 PairLocation* right_pair = locs.in(1).AsPairLocation(); | 634 PairLocation* right_pair = locs.in(1).AsPairLocation(); |
639 Register right_lo = right_pair->At(0).reg(); | 635 Register right_lo = right_pair->At(0).reg(); |
640 Register right_hi = right_pair->At(1).reg(); | 636 Register right_hi = right_pair->At(1).reg(); |
641 | 637 |
642 if (labels.false_label == NULL) { | 638 if (labels.false_label == NULL) { |
643 // Generate branch-free code (except for skipping the lower words compare). | 639 // Generate branch-free code (except for skipping the lower words compare). |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
766 __ LoadObject(result, Bool::False()); | 762 __ LoadObject(result, Bool::False()); |
767 __ b(&done); | 763 __ b(&done); |
768 __ Bind(&is_true); | 764 __ Bind(&is_true); |
769 __ LoadObject(result, Bool::True()); | 765 __ LoadObject(result, Bool::True()); |
770 __ Bind(&done); | 766 __ Bind(&done); |
771 } | 767 } |
772 | 768 |
773 | 769 |
774 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 770 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
775 BranchInstr* branch) { | 771 BranchInstr* branch) { |
776 __ TraceSimMsg("EqualityCompareInstr"); | 772 __ Comment("EqualityCompareInstr::EmitBranchCode"); |
777 __ Comment("EqualityCompareInstr:BranchCode"); | |
778 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); | 773 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); |
779 | 774 |
780 BranchLabels labels = compiler->CreateBranchLabels(branch); | 775 BranchLabels labels = compiler->CreateBranchLabels(branch); |
781 Condition true_condition = EmitComparisonCode(compiler, labels); | 776 Condition true_condition = EmitComparisonCode(compiler, labels); |
782 EmitBranchOnCondition(compiler, true_condition, labels); | 777 EmitBranchOnCondition(compiler, true_condition, labels); |
783 } | 778 } |
784 | 779 |
785 | 780 |
786 LocationSummary* TestSmiInstr::MakeLocationSummary(Zone* zone, | 781 LocationSummary* TestSmiInstr::MakeLocationSummary(Zone* zone, |
787 bool opt) const { | 782 bool opt) const { |
(...skipping 155 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
943 } else if (operation_cid() == kMintCid) { | 938 } else if (operation_cid() == kMintCid) { |
944 return EmitUnboxedMintComparisonOp(compiler, *locs(), kind(), labels); | 939 return EmitUnboxedMintComparisonOp(compiler, *locs(), kind(), labels); |
945 } else { | 940 } else { |
946 ASSERT(operation_cid() == kDoubleCid); | 941 ASSERT(operation_cid() == kDoubleCid); |
947 return EmitDoubleComparisonOp(compiler, *locs(), kind(), labels); | 942 return EmitDoubleComparisonOp(compiler, *locs(), kind(), labels); |
948 } | 943 } |
949 } | 944 } |
950 | 945 |
951 | 946 |
952 void RelationalOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 947 void RelationalOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
953 __ TraceSimMsg("RelationalOpInstr"); | 948 __ Comment("RelationalOpInstr"); |
954 | 949 |
955 Label is_true, is_false; | 950 Label is_true, is_false; |
956 BranchLabels labels = { &is_true, &is_false, &is_false }; | 951 BranchLabels labels = { &is_true, &is_false, &is_false }; |
957 Condition true_condition = EmitComparisonCode(compiler, labels); | 952 Condition true_condition = EmitComparisonCode(compiler, labels); |
958 EmitBranchOnCondition(compiler, true_condition, labels); | 953 EmitBranchOnCondition(compiler, true_condition, labels); |
959 | 954 |
960 Register result = locs()->out(0).reg(); | 955 Register result = locs()->out(0).reg(); |
961 Label done; | 956 Label done; |
962 __ Bind(&is_false); | 957 __ Bind(&is_false); |
963 __ LoadObject(result, Bool::False()); | 958 __ LoadObject(result, Bool::False()); |
964 __ b(&done); | 959 __ b(&done); |
965 __ Bind(&is_true); | 960 __ Bind(&is_true); |
966 __ LoadObject(result, Bool::True()); | 961 __ LoadObject(result, Bool::True()); |
967 __ Bind(&done); | 962 __ Bind(&done); |
968 } | 963 } |
969 | 964 |
970 | 965 |
971 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 966 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
972 BranchInstr* branch) { | 967 BranchInstr* branch) { |
973 __ TraceSimMsg("RelationalOpInstr"); | 968 __ Comment("RelationalOpInstr"); |
974 | 969 |
975 BranchLabels labels = compiler->CreateBranchLabels(branch); | 970 BranchLabels labels = compiler->CreateBranchLabels(branch); |
976 Condition true_condition = EmitComparisonCode(compiler, labels); | 971 Condition true_condition = EmitComparisonCode(compiler, labels); |
977 EmitBranchOnCondition(compiler, true_condition, labels); | 972 EmitBranchOnCondition(compiler, true_condition, labels); |
978 } | 973 } |
979 | 974 |
980 | 975 |
981 LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone, | 976 LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone, |
982 bool opt) const { | 977 bool opt) const { |
983 return MakeCallSummary(zone); | 978 return MakeCallSummary(zone); |
984 } | 979 } |
985 | 980 |
986 | 981 |
987 void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 982 void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
988 __ TraceSimMsg("NativeCallInstr"); | 983 __ Comment("NativeCallInstr"); |
989 Register result = locs()->out(0).reg(); | 984 Register result = locs()->out(0).reg(); |
990 | 985 |
991 // Push the result place holder initialized to NULL. | 986 // Push the result place holder initialized to NULL. |
992 __ PushObject(Object::null_object()); | 987 __ PushObject(Object::null_object()); |
993 // Pass a pointer to the first argument in A2. | 988 // Pass a pointer to the first argument in A2. |
994 if (!function().HasOptionalParameters()) { | 989 if (!function().HasOptionalParameters()) { |
995 __ AddImmediate(A2, FP, (kParamEndSlotFromFp + | 990 __ AddImmediate(A2, FP, (kParamEndSlotFromFp + |
996 function().NumParameters()) * kWordSize); | 991 function().NumParameters()) * kWordSize); |
997 } else { | 992 } else { |
998 __ AddImmediate(A2, FP, kFirstLocalSlotFromFp * kWordSize); | 993 __ AddImmediate(A2, FP, kFirstLocalSlotFromFp * kWordSize); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1042 kNumInputs, | 1037 kNumInputs, |
1043 Location::RequiresRegister(), | 1038 Location::RequiresRegister(), |
1044 LocationSummary::kNoCall); | 1039 LocationSummary::kNoCall); |
1045 } | 1040 } |
1046 | 1041 |
1047 | 1042 |
1048 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1043 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1049 Register char_code = locs()->in(0).reg(); | 1044 Register char_code = locs()->in(0).reg(); |
1050 Register result = locs()->out(0).reg(); | 1045 Register result = locs()->out(0).reg(); |
1051 | 1046 |
1052 __ TraceSimMsg("StringFromCharCodeInstr"); | 1047 __ Comment("StringFromCharCodeInstr"); |
1053 | 1048 |
1054 __ LoadImmediate(result, | 1049 __ LoadImmediate(result, |
1055 reinterpret_cast<uword>(Symbols::PredefinedAddress())); | 1050 reinterpret_cast<uword>(Symbols::PredefinedAddress())); |
1056 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); | 1051 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); |
1057 __ sll(TMP, char_code, 1); // Char code is a smi. | 1052 __ sll(TMP, char_code, 1); // Char code is a smi. |
1058 __ addu(TMP, TMP, result); | 1053 __ addu(TMP, TMP, result); |
1059 __ lw(result, Address(TMP)); | 1054 __ lw(result, Address(TMP)); |
1060 } | 1055 } |
1061 | 1056 |
1062 | 1057 |
1063 LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone, | 1058 LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone, |
1064 bool opt) const { | 1059 bool opt) const { |
1065 const intptr_t kNumInputs = 1; | 1060 const intptr_t kNumInputs = 1; |
1066 return LocationSummary::Make(zone, | 1061 return LocationSummary::Make(zone, |
1067 kNumInputs, | 1062 kNumInputs, |
1068 Location::RequiresRegister(), | 1063 Location::RequiresRegister(), |
1069 LocationSummary::kNoCall); | 1064 LocationSummary::kNoCall); |
1070 } | 1065 } |
1071 | 1066 |
1072 | 1067 |
1073 void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1068 void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1074 __ TraceSimMsg("StringToCharCodeInstr"); | 1069 __ Comment("StringToCharCodeInstr"); |
1075 | 1070 |
1076 ASSERT(cid_ == kOneByteStringCid); | 1071 ASSERT(cid_ == kOneByteStringCid); |
1077 Register str = locs()->in(0).reg(); | 1072 Register str = locs()->in(0).reg(); |
1078 Register result = locs()->out(0).reg(); | 1073 Register result = locs()->out(0).reg(); |
1079 ASSERT(str != result); | 1074 ASSERT(str != result); |
1080 Label done; | 1075 Label done; |
1081 __ lw(result, FieldAddress(str, String::length_offset())); | 1076 __ lw(result, FieldAddress(str, String::length_offset())); |
1082 __ BranchNotEqual(result, Immediate(Smi::RawValue(1)), &done); | 1077 __ BranchNotEqual(result, Immediate(Smi::RawValue(1)), &done); |
1083 __ delay_slot()->addiu(result, ZR, Immediate(Smi::RawValue(-1))); | 1078 __ delay_slot()->addiu(result, ZR, Immediate(Smi::RawValue(-1))); |
1084 __ lbu(result, FieldAddress(str, OneByteString::data_offset())); | 1079 __ lbu(result, FieldAddress(str, OneByteString::data_offset())); |
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1255 (representation() == kUnboxedInt32x4)) { | 1250 (representation() == kUnboxedInt32x4)) { |
1256 locs->set_out(0, Location::RequiresFpuRegister()); | 1251 locs->set_out(0, Location::RequiresFpuRegister()); |
1257 } else { | 1252 } else { |
1258 locs->set_out(0, Location::RequiresRegister()); | 1253 locs->set_out(0, Location::RequiresRegister()); |
1259 } | 1254 } |
1260 return locs; | 1255 return locs; |
1261 } | 1256 } |
1262 | 1257 |
1263 | 1258 |
1264 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1259 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1265 __ TraceSimMsg("LoadIndexedInstr"); | 1260 __ Comment("LoadIndexedInstr"); |
1266 // The array register points to the backing store for external arrays. | 1261 // The array register points to the backing store for external arrays. |
1267 const Register array = locs()->in(0).reg(); | 1262 const Register array = locs()->in(0).reg(); |
1268 const Location index = locs()->in(1); | 1263 const Location index = locs()->in(1); |
1269 | 1264 |
1270 Address element_address = index.IsRegister() | 1265 Address element_address = index.IsRegister() |
1271 ? __ ElementAddressForRegIndex(true, // Load. | 1266 ? __ ElementAddressForRegIndex(true, // Load. |
1272 IsExternal(), class_id(), index_scale(), | 1267 IsExternal(), class_id(), index_scale(), |
1273 array, index.reg()) | 1268 array, index.reg()) |
1274 : __ ElementAddressForIntIndex( | 1269 : __ ElementAddressForIntIndex( |
1275 IsExternal(), class_id(), index_scale(), | 1270 IsExternal(), class_id(), index_scale(), |
(...skipping 202 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1478 break; | 1473 break; |
1479 default: | 1474 default: |
1480 UNREACHABLE(); | 1475 UNREACHABLE(); |
1481 return NULL; | 1476 return NULL; |
1482 } | 1477 } |
1483 return locs; | 1478 return locs; |
1484 } | 1479 } |
1485 | 1480 |
1486 | 1481 |
1487 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1482 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1488 __ TraceSimMsg("StoreIndexedInstr"); | 1483 __ Comment("StoreIndexedInstr"); |
1489 // The array register points to the backing store for external arrays. | 1484 // The array register points to the backing store for external arrays. |
1490 const Register array = locs()->in(0).reg(); | 1485 const Register array = locs()->in(0).reg(); |
1491 const Location index = locs()->in(1); | 1486 const Location index = locs()->in(1); |
1492 | 1487 |
1493 Address element_address = index.IsRegister() | 1488 Address element_address = index.IsRegister() |
1494 ? __ ElementAddressForRegIndex(false, // Store. | 1489 ? __ ElementAddressForRegIndex(false, // Store. |
1495 IsExternal(), class_id(), index_scale(), | 1490 IsExternal(), class_id(), index_scale(), |
1496 array, index.reg()) | 1491 array, index.reg()) |
1497 : __ ElementAddressForIntIndex( | 1492 : __ ElementAddressForIntIndex( |
1498 IsExternal(), class_id(), index_scale(), | 1493 IsExternal(), class_id(), index_scale(), |
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1610 | 1605 |
1611 for (intptr_t i = 0; i < num_temps; i++) { | 1606 for (intptr_t i = 0; i < num_temps; i++) { |
1612 summary->set_temp(i, Location::RequiresRegister()); | 1607 summary->set_temp(i, Location::RequiresRegister()); |
1613 } | 1608 } |
1614 | 1609 |
1615 return summary; | 1610 return summary; |
1616 } | 1611 } |
1617 | 1612 |
1618 | 1613 |
1619 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1614 void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1620 __ TraceSimMsg("GuardFieldClassInstr"); | 1615 __ Comment("GuardFieldClassInstr"); |
1621 | 1616 |
1622 const intptr_t value_cid = value()->Type()->ToCid(); | 1617 const intptr_t value_cid = value()->Type()->ToCid(); |
1623 const intptr_t field_cid = field().guarded_cid(); | 1618 const intptr_t field_cid = field().guarded_cid(); |
1624 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; | 1619 const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid; |
1625 | 1620 |
1626 if (field_cid == kDynamicCid) { | 1621 if (field_cid == kDynamicCid) { |
1627 ASSERT(!compiler->is_optimizing()); | 1622 ASSERT(!compiler->is_optimizing()); |
1628 return; // Nothing to emit. | 1623 return; // Nothing to emit. |
1629 } | 1624 } |
1630 | 1625 |
(...skipping 452 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2083 return summary; | 2078 return summary; |
2084 } | 2079 } |
2085 | 2080 |
2086 | 2081 |
2087 // When the parser is building an implicit static getter for optimization, | 2082 // When the parser is building an implicit static getter for optimization, |
2088 // it can generate a function body where deoptimization ids do not line up | 2083 // it can generate a function body where deoptimization ids do not line up |
2089 // with the unoptimized code. | 2084 // with the unoptimized code. |
2090 // | 2085 // |
2091 // This is safe only so long as LoadStaticFieldInstr cannot deoptimize. | 2086 // This is safe only so long as LoadStaticFieldInstr cannot deoptimize. |
2092 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2087 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2093 __ TraceSimMsg("LoadStaticFieldInstr"); | 2088 __ Comment("LoadStaticFieldInstr"); |
2094 Register field = locs()->in(0).reg(); | 2089 Register field = locs()->in(0).reg(); |
2095 Register result = locs()->out(0).reg(); | 2090 Register result = locs()->out(0).reg(); |
2096 __ LoadFromOffset(result, field, Field::value_offset() - kHeapObjectTag); | 2091 __ LoadFromOffset(result, field, Field::value_offset() - kHeapObjectTag); |
2097 } | 2092 } |
2098 | 2093 |
2099 | 2094 |
2100 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone, | 2095 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone, |
2101 bool opt) const { | 2096 bool opt) const { |
2102 LocationSummary* locs = new(zone) LocationSummary( | 2097 LocationSummary* locs = new(zone) LocationSummary( |
2103 zone, 1, 1, LocationSummary::kNoCall); | 2098 zone, 1, 1, LocationSummary::kNoCall); |
2104 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 2099 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
2105 : Location::RequiresRegister()); | 2100 : Location::RequiresRegister()); |
2106 locs->set_temp(0, Location::RequiresRegister()); | 2101 locs->set_temp(0, Location::RequiresRegister()); |
2107 return locs; | 2102 return locs; |
2108 } | 2103 } |
2109 | 2104 |
2110 | 2105 |
2111 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2106 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2112 __ TraceSimMsg("StoreStaticFieldInstr"); | 2107 __ Comment("StoreStaticFieldInstr"); |
2113 Register value = locs()->in(0).reg(); | 2108 Register value = locs()->in(0).reg(); |
2114 Register temp = locs()->temp(0).reg(); | 2109 Register temp = locs()->temp(0).reg(); |
2115 | 2110 |
2116 __ LoadObject(temp, field()); | 2111 __ LoadObject(temp, field()); |
2117 if (this->value()->NeedsStoreBuffer()) { | 2112 if (this->value()->NeedsStoreBuffer()) { |
2118 __ StoreIntoObject(temp, | 2113 __ StoreIntoObject(temp, |
2119 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 2114 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
2120 } else { | 2115 } else { |
2121 __ StoreIntoObjectNoBarrier( | 2116 __ StoreIntoObjectNoBarrier( |
2122 temp, FieldAddress(temp, Field::value_offset()), value); | 2117 temp, FieldAddress(temp, Field::value_offset()), value); |
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2216 __ sw(T7, Address(T2, 0)); | 2211 __ sw(T7, Address(T2, 0)); |
2217 __ addiu(T2, T2, Immediate(kWordSize)); | 2212 __ addiu(T2, T2, Immediate(kWordSize)); |
2218 __ BranchUnsignedLess(T2, T1, &init_loop); | 2213 __ BranchUnsignedLess(T2, T1, &init_loop); |
2219 } | 2214 } |
2220 } | 2215 } |
2221 __ b(done); | 2216 __ b(done); |
2222 } | 2217 } |
2223 | 2218 |
2224 | 2219 |
2225 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2220 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2226 __ TraceSimMsg("CreateArrayInstr"); | 2221 __ Comment("CreateArrayInstr"); |
2227 const Register kLengthReg = A1; | 2222 const Register kLengthReg = A1; |
2228 const Register kElemTypeReg = A0; | 2223 const Register kElemTypeReg = A0; |
2229 const Register kResultReg = V0; | 2224 const Register kResultReg = V0; |
2230 ASSERT(locs()->in(0).reg() == kElemTypeReg); | 2225 ASSERT(locs()->in(0).reg() == kElemTypeReg); |
2231 ASSERT(locs()->in(1).reg() == kLengthReg); | 2226 ASSERT(locs()->in(1).reg() == kLengthReg); |
2232 | 2227 |
2233 Label slow_path, done; | 2228 Label slow_path, done; |
2234 if (num_elements()->BindsToConstant() && | 2229 if (num_elements()->BindsToConstant() && |
2235 num_elements()->BoundConstant().IsSmi()) { | 2230 num_elements()->BoundConstant().IsSmi()) { |
2236 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value(); | 2231 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value(); |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2365 const intptr_t kNumTemps = 0; | 2360 const intptr_t kNumTemps = 0; |
2366 LocationSummary* locs = new(zone) LocationSummary( | 2361 LocationSummary* locs = new(zone) LocationSummary( |
2367 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 2362 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
2368 locs->set_in(0, Location::RegisterLocation(T0)); | 2363 locs->set_in(0, Location::RegisterLocation(T0)); |
2369 locs->set_out(0, Location::RegisterLocation(T0)); | 2364 locs->set_out(0, Location::RegisterLocation(T0)); |
2370 return locs; | 2365 return locs; |
2371 } | 2366 } |
2372 | 2367 |
2373 | 2368 |
2374 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2369 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2375 __ TraceSimMsg("InstantiateTypeInstr"); | 2370 __ Comment("InstantiateTypeInstr"); |
2376 Register instantiator_reg = locs()->in(0).reg(); | 2371 Register instantiator_reg = locs()->in(0).reg(); |
2377 Register result_reg = locs()->out(0).reg(); | 2372 Register result_reg = locs()->out(0).reg(); |
2378 | 2373 |
2379 // 'instantiator_reg' is the instantiator TypeArguments object (or null). | 2374 // 'instantiator_reg' is the instantiator TypeArguments object (or null). |
2380 // A runtime call to instantiate the type is required. | 2375 // A runtime call to instantiate the type is required. |
2381 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 2376 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
2382 __ LoadObject(TMP, Object::null_object()); | 2377 __ LoadObject(TMP, Object::null_object()); |
2383 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. | 2378 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. |
2384 __ LoadObject(TMP, type()); | 2379 __ LoadObject(TMP, type()); |
2385 __ sw(TMP, Address(SP, 1 * kWordSize)); | 2380 __ sw(TMP, Address(SP, 1 * kWordSize)); |
(...skipping 20 matching lines...) Expand all Loading... |
2406 LocationSummary* locs = new(zone) LocationSummary( | 2401 LocationSummary* locs = new(zone) LocationSummary( |
2407 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 2402 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
2408 locs->set_in(0, Location::RegisterLocation(T0)); | 2403 locs->set_in(0, Location::RegisterLocation(T0)); |
2409 locs->set_out(0, Location::RegisterLocation(T0)); | 2404 locs->set_out(0, Location::RegisterLocation(T0)); |
2410 return locs; | 2405 return locs; |
2411 } | 2406 } |
2412 | 2407 |
2413 | 2408 |
2414 void InstantiateTypeArgumentsInstr::EmitNativeCode( | 2409 void InstantiateTypeArgumentsInstr::EmitNativeCode( |
2415 FlowGraphCompiler* compiler) { | 2410 FlowGraphCompiler* compiler) { |
2416 __ TraceSimMsg("InstantiateTypeArgumentsInstr"); | 2411 __ Comment("InstantiateTypeArgumentsInstr"); |
2417 Register instantiator_reg = locs()->in(0).reg(); | 2412 Register instantiator_reg = locs()->in(0).reg(); |
2418 Register result_reg = locs()->out(0).reg(); | 2413 Register result_reg = locs()->out(0).reg(); |
2419 ASSERT(instantiator_reg == T0); | 2414 ASSERT(instantiator_reg == T0); |
2420 ASSERT(instantiator_reg == result_reg); | 2415 ASSERT(instantiator_reg == result_reg); |
2421 | 2416 |
2422 // 'instantiator_reg' is the instantiator TypeArguments object (or null). | 2417 // 'instantiator_reg' is the instantiator TypeArguments object (or null). |
2423 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2418 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
2424 !type_arguments().CanShareInstantiatorTypeArguments( | 2419 !type_arguments().CanShareInstantiatorTypeArguments( |
2425 instantiator_class())); | 2420 instantiator_class())); |
2426 // If the instantiator is null and if the type argument vector | 2421 // If the instantiator is null and if the type argument vector |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2556 locs->set_temp(0, Location::RegisterLocation(T1)); | 2551 locs->set_temp(0, Location::RegisterLocation(T1)); |
2557 locs->set_out(0, Location::RegisterLocation(V0)); | 2552 locs->set_out(0, Location::RegisterLocation(V0)); |
2558 return locs; | 2553 return locs; |
2559 } | 2554 } |
2560 | 2555 |
2561 | 2556 |
2562 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2557 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2563 ASSERT(locs()->temp(0).reg() == T1); | 2558 ASSERT(locs()->temp(0).reg() == T1); |
2564 ASSERT(locs()->out(0).reg() == V0); | 2559 ASSERT(locs()->out(0).reg() == V0); |
2565 | 2560 |
2566 __ TraceSimMsg("AllocateContextInstr"); | 2561 __ Comment("AllocateContextInstr"); |
2567 __ LoadImmediate(T1, num_context_variables()); | 2562 __ LoadImmediate(T1, num_context_variables()); |
2568 StubCode* stub_code = compiler->isolate()->stub_code(); | 2563 StubCode* stub_code = compiler->isolate()->stub_code(); |
2569 const ExternalLabel label(stub_code->AllocateContextEntryPoint()); | 2564 const ExternalLabel label(stub_code->AllocateContextEntryPoint()); |
2570 compiler->GenerateCall(token_pos(), | 2565 compiler->GenerateCall(token_pos(), |
2571 &label, | 2566 &label, |
2572 RawPcDescriptors::kOther, | 2567 RawPcDescriptors::kOther, |
2573 locs()); | 2568 locs()); |
2574 } | 2569 } |
2575 | 2570 |
2576 | 2571 |
2577 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, | 2572 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, |
2578 bool opt) const { | 2573 bool opt) const { |
2579 const intptr_t kNumInputs = 1; | 2574 const intptr_t kNumInputs = 1; |
2580 const intptr_t kNumTemps = 1; | 2575 const intptr_t kNumTemps = 1; |
2581 LocationSummary* locs = new(zone) LocationSummary( | 2576 LocationSummary* locs = new(zone) LocationSummary( |
2582 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 2577 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
2583 locs->set_in(0, Location::RegisterLocation(T0)); | 2578 locs->set_in(0, Location::RegisterLocation(T0)); |
2584 locs->set_temp(0, Location::RegisterLocation(T1)); | 2579 locs->set_temp(0, Location::RegisterLocation(T1)); |
2585 return locs; | 2580 return locs; |
2586 } | 2581 } |
2587 | 2582 |
2588 | 2583 |
2589 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2584 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2590 Register field = locs()->in(0).reg(); | 2585 Register field = locs()->in(0).reg(); |
2591 Register temp = locs()->temp(0).reg(); | 2586 Register temp = locs()->temp(0).reg(); |
2592 | 2587 |
2593 Label call_runtime, no_call; | 2588 Label call_runtime, no_call; |
2594 __ TraceSimMsg("InitStaticFieldInstr"); | 2589 __ Comment("InitStaticFieldInstr"); |
2595 | 2590 |
2596 __ lw(temp, FieldAddress(field, Field::value_offset())); | 2591 __ lw(temp, FieldAddress(field, Field::value_offset())); |
2597 __ BranchEqual(temp, Object::sentinel(), &call_runtime); | 2592 __ BranchEqual(temp, Object::sentinel(), &call_runtime); |
2598 __ BranchNotEqual(temp, Object::transition_sentinel(), &no_call); | 2593 __ BranchNotEqual(temp, Object::transition_sentinel(), &no_call); |
2599 | 2594 |
2600 __ Bind(&call_runtime); | 2595 __ Bind(&call_runtime); |
2601 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 2596 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
2602 __ LoadObject(TMP, Object::null_object()); | 2597 __ LoadObject(TMP, Object::null_object()); |
2603 __ sw(TMP, Address(SP, 1 * kWordSize)); // Make room for (unused) result. | 2598 __ sw(TMP, Address(SP, 1 * kWordSize)); // Make room for (unused) result. |
2604 __ sw(field, Address(SP, 0 * kWordSize)); | 2599 __ sw(field, Address(SP, 0 * kWordSize)); |
(...skipping 19 matching lines...) Expand all Loading... |
2624 locs->set_in(0, Location::RegisterLocation(T0)); | 2619 locs->set_in(0, Location::RegisterLocation(T0)); |
2625 locs->set_out(0, Location::RegisterLocation(T0)); | 2620 locs->set_out(0, Location::RegisterLocation(T0)); |
2626 return locs; | 2621 return locs; |
2627 } | 2622 } |
2628 | 2623 |
2629 | 2624 |
2630 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2625 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2631 Register context_value = locs()->in(0).reg(); | 2626 Register context_value = locs()->in(0).reg(); |
2632 Register result = locs()->out(0).reg(); | 2627 Register result = locs()->out(0).reg(); |
2633 | 2628 |
2634 __ TraceSimMsg("CloneContextInstr"); | 2629 __ Comment("CloneContextInstr"); |
2635 | 2630 |
2636 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 2631 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
2637 __ LoadObject(TMP, Object::null_object()); // Make room for the result. | 2632 __ LoadObject(TMP, Object::null_object()); // Make room for the result. |
2638 __ sw(TMP, Address(SP, 1 * kWordSize)); | 2633 __ sw(TMP, Address(SP, 1 * kWordSize)); |
2639 __ sw(context_value, Address(SP, 0 * kWordSize)); | 2634 __ sw(context_value, Address(SP, 0 * kWordSize)); |
2640 | 2635 |
2641 compiler->GenerateRuntimeCall(token_pos(), | 2636 compiler->GenerateRuntimeCall(token_pos(), |
2642 deopt_id(), | 2637 deopt_id(), |
2643 kCloneContextRuntimeEntry, | 2638 kCloneContextRuntimeEntry, |
2644 1, | 2639 1, |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2704 | 2699 |
2705 class CheckStackOverflowSlowPath : public SlowPathCode { | 2700 class CheckStackOverflowSlowPath : public SlowPathCode { |
2706 public: | 2701 public: |
2707 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) | 2702 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) |
2708 : instruction_(instruction) { } | 2703 : instruction_(instruction) { } |
2709 | 2704 |
2710 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2705 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
2711 if (FLAG_use_osr) { | 2706 if (FLAG_use_osr) { |
2712 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); | 2707 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); |
2713 Register value = instruction_->locs()->temp(0).reg(); | 2708 Register value = instruction_->locs()->temp(0).reg(); |
2714 __ TraceSimMsg("CheckStackOverflowSlowPathOsr"); | |
2715 __ Comment("CheckStackOverflowSlowPathOsr"); | 2709 __ Comment("CheckStackOverflowSlowPathOsr"); |
2716 __ Bind(osr_entry_label()); | 2710 __ Bind(osr_entry_label()); |
2717 __ LoadImmediate(TMP, flags_address); | 2711 __ LoadImmediate(TMP, flags_address); |
2718 __ LoadImmediate(value, Isolate::kOsrRequest); | 2712 __ LoadImmediate(value, Isolate::kOsrRequest); |
2719 __ sw(value, Address(TMP)); | 2713 __ sw(value, Address(TMP)); |
2720 } | 2714 } |
2721 __ TraceSimMsg("CheckStackOverflowSlowPath"); | |
2722 __ Comment("CheckStackOverflowSlowPath"); | 2715 __ Comment("CheckStackOverflowSlowPath"); |
2723 __ Bind(entry_label()); | 2716 __ Bind(entry_label()); |
2724 compiler->SaveLiveRegisters(instruction_->locs()); | 2717 compiler->SaveLiveRegisters(instruction_->locs()); |
2725 // pending_deoptimization_env_ is needed to generate a runtime call that | 2718 // pending_deoptimization_env_ is needed to generate a runtime call that |
2726 // may throw an exception. | 2719 // may throw an exception. |
2727 ASSERT(compiler->pending_deoptimization_env_ == NULL); | 2720 ASSERT(compiler->pending_deoptimization_env_ == NULL); |
2728 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); | 2721 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); |
2729 compiler->pending_deoptimization_env_ = env; | 2722 compiler->pending_deoptimization_env_ = env; |
2730 compiler->GenerateRuntimeCall(instruction_->token_pos(), | 2723 compiler->GenerateRuntimeCall(instruction_->token_pos(), |
2731 instruction_->deopt_id(), | 2724 instruction_->deopt_id(), |
(...skipping 17 matching lines...) Expand all Loading... |
2749 return &osr_entry_label_; | 2742 return &osr_entry_label_; |
2750 } | 2743 } |
2751 | 2744 |
2752 private: | 2745 private: |
2753 CheckStackOverflowInstr* instruction_; | 2746 CheckStackOverflowInstr* instruction_; |
2754 Label osr_entry_label_; | 2747 Label osr_entry_label_; |
2755 }; | 2748 }; |
2756 | 2749 |
2757 | 2750 |
2758 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2751 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2759 __ TraceSimMsg("CheckStackOverflowInstr"); | 2752 __ Comment("CheckStackOverflowInstr"); |
2760 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); | 2753 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); |
2761 compiler->AddSlowPathCode(slow_path); | 2754 compiler->AddSlowPathCode(slow_path); |
2762 | 2755 |
2763 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address()); | 2756 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address()); |
2764 __ lw(CMPRES1, Address(TMP)); | 2757 __ lw(CMPRES1, Address(TMP)); |
2765 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); | 2758 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); |
2766 if (compiler->CanOSRFunction() && in_loop()) { | 2759 if (compiler->CanOSRFunction() && in_loop()) { |
2767 Register temp = locs()->temp(0).reg(); | 2760 Register temp = locs()->temp(0).reg(); |
2768 // In unoptimized code check the usage counter to trigger OSR at loop | 2761 // In unoptimized code check the usage counter to trigger OSR at loop |
2769 // stack checks. Use progressively higher thresholds for more deeply | 2762 // stack checks. Use progressively higher thresholds for more deeply |
(...skipping 14 matching lines...) Expand all Loading... |
2784 | 2777 |
2785 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, | 2778 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, |
2786 BinarySmiOpInstr* shift_left) { | 2779 BinarySmiOpInstr* shift_left) { |
2787 const LocationSummary& locs = *shift_left->locs(); | 2780 const LocationSummary& locs = *shift_left->locs(); |
2788 Register left = locs.in(0).reg(); | 2781 Register left = locs.in(0).reg(); |
2789 Register result = locs.out(0).reg(); | 2782 Register result = locs.out(0).reg(); |
2790 Label* deopt = shift_left->CanDeoptimize() ? | 2783 Label* deopt = shift_left->CanDeoptimize() ? |
2791 compiler->AddDeoptStub(shift_left->deopt_id(), ICData::kDeoptBinarySmiOp) | 2784 compiler->AddDeoptStub(shift_left->deopt_id(), ICData::kDeoptBinarySmiOp) |
2792 : NULL; | 2785 : NULL; |
2793 | 2786 |
2794 __ TraceSimMsg("EmitSmiShiftLeft"); | 2787 __ Comment("EmitSmiShiftLeft"); |
2795 | 2788 |
2796 if (locs.in(1).IsConstant()) { | 2789 if (locs.in(1).IsConstant()) { |
2797 const Object& constant = locs.in(1).constant(); | 2790 const Object& constant = locs.in(1).constant(); |
2798 ASSERT(constant.IsSmi()); | 2791 ASSERT(constant.IsSmi()); |
2799 // Immediate shift operation takes 5 bits for the count. | 2792 // Immediate shift operation takes 5 bits for the count. |
2800 const intptr_t kCountLimit = 0x1F; | 2793 const intptr_t kCountLimit = 0x1F; |
2801 const intptr_t value = Smi::Cast(constant).Value(); | 2794 const intptr_t value = Smi::Cast(constant).Value(); |
2802 ASSERT((0 < value) && (value < kCountLimit)); | 2795 ASSERT((0 < value) && (value < kCountLimit)); |
2803 if (shift_left->can_overflow()) { | 2796 if (shift_left->can_overflow()) { |
2804 // Check for overflow (preserve left). | 2797 // Check for overflow (preserve left). |
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2923 summary->set_temp(0, Location::RequiresRegister()); | 2916 summary->set_temp(0, Location::RequiresRegister()); |
2924 } | 2917 } |
2925 // We make use of 3-operand instructions by not requiring result register | 2918 // We make use of 3-operand instructions by not requiring result register |
2926 // to be identical to first input register as on Intel. | 2919 // to be identical to first input register as on Intel. |
2927 summary->set_out(0, Location::RequiresRegister()); | 2920 summary->set_out(0, Location::RequiresRegister()); |
2928 return summary; | 2921 return summary; |
2929 } | 2922 } |
2930 | 2923 |
2931 | 2924 |
2932 void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2925 void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2933 __ TraceSimMsg("BinarySmiOpInstr"); | 2926 __ Comment("BinarySmiOpInstr"); |
2934 if (op_kind() == Token::kSHL) { | 2927 if (op_kind() == Token::kSHL) { |
2935 EmitSmiShiftLeft(compiler, this); | 2928 EmitSmiShiftLeft(compiler, this); |
2936 return; | 2929 return; |
2937 } | 2930 } |
2938 | 2931 |
2939 Register left = locs()->in(0).reg(); | 2932 Register left = locs()->in(0).reg(); |
2940 Register result = locs()->out(0).reg(); | 2933 Register result = locs()->out(0).reg(); |
2941 Label* deopt = NULL; | 2934 Label* deopt = NULL; |
2942 if (CanDeoptimize()) { | 2935 if (CanDeoptimize()) { |
2943 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); | 2936 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); |
2944 } | 2937 } |
2945 | 2938 |
2946 if (locs()->in(1).IsConstant()) { | 2939 if (locs()->in(1).IsConstant()) { |
2947 const Object& constant = locs()->in(1).constant(); | 2940 const Object& constant = locs()->in(1).constant(); |
2948 ASSERT(constant.IsSmi()); | 2941 ASSERT(constant.IsSmi()); |
2949 const int32_t imm = reinterpret_cast<int32_t>(constant.raw()); | 2942 const int32_t imm = reinterpret_cast<int32_t>(constant.raw()); |
2950 switch (op_kind()) { | 2943 switch (op_kind()) { |
2951 case Token::kADD: { | 2944 case Token::kADD: { |
2952 if (deopt == NULL) { | 2945 if (deopt == NULL) { |
2953 __ AddImmediate(result, left, imm); | 2946 __ AddImmediate(result, left, imm); |
2954 } else { | 2947 } else { |
2955 Register temp = locs()->temp(0).reg(); | 2948 Register temp = locs()->temp(0).reg(); |
2956 __ AddImmediateDetectOverflow(result, left, imm, CMPRES1, temp); | 2949 __ AddImmediateDetectOverflow(result, left, imm, CMPRES1, temp); |
2957 __ bltz(CMPRES1, deopt); | 2950 __ bltz(CMPRES1, deopt); |
2958 } | 2951 } |
2959 break; | 2952 break; |
2960 } | 2953 } |
2961 case Token::kSUB: { | 2954 case Token::kSUB: { |
2962 __ TraceSimMsg("kSUB imm"); | 2955 __ Comment("kSUB imm"); |
2963 if (deopt == NULL) { | 2956 if (deopt == NULL) { |
2964 __ AddImmediate(result, left, -imm); | 2957 __ AddImmediate(result, left, -imm); |
2965 } else { | 2958 } else { |
2966 __ SubImmediateDetectOverflow(result, left, imm, CMPRES1); | 2959 __ SubImmediateDetectOverflow(result, left, imm, CMPRES1); |
2967 __ bltz(CMPRES1, deopt); | 2960 __ bltz(CMPRES1, deopt); |
2968 } | 2961 } |
2969 break; | 2962 break; |
2970 } | 2963 } |
2971 case Token::kMUL: { | 2964 case Token::kMUL: { |
2972 // Keep left value tagged and untag right value. | 2965 // Keep left value tagged and untag right value. |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3012 } | 3005 } |
3013 case Token::kBIT_XOR: { | 3006 case Token::kBIT_XOR: { |
3014 // No overflow check. | 3007 // No overflow check. |
3015 __ XorImmediate(result, left, imm); | 3008 __ XorImmediate(result, left, imm); |
3016 break; | 3009 break; |
3017 } | 3010 } |
3018 case Token::kSHR: { | 3011 case Token::kSHR: { |
3019 // sarl operation masks the count to 5 bits. | 3012 // sarl operation masks the count to 5 bits. |
3020 const intptr_t kCountLimit = 0x1F; | 3013 const intptr_t kCountLimit = 0x1F; |
3021 const intptr_t value = Smi::Cast(constant).Value(); | 3014 const intptr_t value = Smi::Cast(constant).Value(); |
3022 __ TraceSimMsg("kSHR"); | 3015 __ Comment("kSHR"); |
3023 __ sra(result, left, Utils::Minimum(value + kSmiTagSize, kCountLimit)); | 3016 __ sra(result, left, Utils::Minimum(value + kSmiTagSize, kCountLimit)); |
3024 __ SmiTag(result); | 3017 __ SmiTag(result); |
3025 break; | 3018 break; |
3026 } | 3019 } |
3027 | 3020 |
3028 default: | 3021 default: |
3029 UNREACHABLE(); | 3022 UNREACHABLE(); |
3030 break; | 3023 break; |
3031 } | 3024 } |
3032 return; | 3025 return; |
3033 } | 3026 } |
3034 | 3027 |
3035 Register right = locs()->in(1).reg(); | 3028 Register right = locs()->in(1).reg(); |
3036 Range* right_range = this->right()->definition()->range(); | 3029 Range* right_range = this->right()->definition()->range(); |
3037 switch (op_kind()) { | 3030 switch (op_kind()) { |
3038 case Token::kADD: { | 3031 case Token::kADD: { |
3039 if (deopt == NULL) { | 3032 if (deopt == NULL) { |
3040 __ addu(result, left, right); | 3033 __ addu(result, left, right); |
3041 } else { | 3034 } else { |
3042 Register temp = locs()->temp(0).reg(); | 3035 Register temp = locs()->temp(0).reg(); |
3043 __ AdduDetectOverflow(result, left, right, CMPRES1, temp); | 3036 __ AdduDetectOverflow(result, left, right, CMPRES1, temp); |
3044 __ bltz(CMPRES1, deopt); | 3037 __ bltz(CMPRES1, deopt); |
3045 } | 3038 } |
3046 break; | 3039 break; |
3047 } | 3040 } |
3048 case Token::kSUB: { | 3041 case Token::kSUB: { |
3049 __ TraceSimMsg("kSUB"); | 3042 __ Comment("kSUB"); |
3050 if (deopt == NULL) { | 3043 if (deopt == NULL) { |
3051 __ subu(result, left, right); | 3044 __ subu(result, left, right); |
3052 } else { | 3045 } else { |
3053 __ SubuDetectOverflow(result, left, right, CMPRES1); | 3046 __ SubuDetectOverflow(result, left, right, CMPRES1); |
3054 __ bltz(CMPRES1, deopt); | 3047 __ bltz(CMPRES1, deopt); |
3055 } | 3048 } |
3056 break; | 3049 break; |
3057 } | 3050 } |
3058 case Token::kMUL: { | 3051 case Token::kMUL: { |
3059 __ TraceSimMsg("kMUL"); | 3052 __ Comment("kMUL"); |
3060 __ sra(TMP, left, kSmiTagSize); | 3053 __ sra(TMP, left, kSmiTagSize); |
3061 __ mult(TMP, right); | 3054 __ mult(TMP, right); |
3062 __ mflo(result); | 3055 __ mflo(result); |
3063 if (deopt != NULL) { | 3056 if (deopt != NULL) { |
3064 __ mfhi(CMPRES2); | 3057 __ mfhi(CMPRES2); |
3065 __ sra(CMPRES1, result, 31); | 3058 __ sra(CMPRES1, result, 31); |
3066 __ bne(CMPRES1, CMPRES2, deopt); | 3059 __ bne(CMPRES1, CMPRES2, deopt); |
3067 } | 3060 } |
3068 break; | 3061 break; |
3069 } | 3062 } |
(...skipping 1518 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4588 } | 4581 } |
4589 | 4582 |
4590 | 4583 |
// Polymorphic instance calls use the standard call summary (fixed argument
// and result registers), so simply delegate to MakeCallSummary.
LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary(
    Zone* zone, bool opt) const {
  return MakeCallSummary(zone);
}
4595 | 4588 |
4596 | 4589 |
4597 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4590 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4598 __ TraceSimMsg("PolymorphicInstanceCallInstr"); | 4591 __ Comment("PolymorphicInstanceCallInstr"); |
4599 ASSERT(ic_data().NumArgsTested() == 1); | 4592 ASSERT(ic_data().NumArgsTested() == 1); |
4600 if (!with_checks()) { | 4593 if (!with_checks()) { |
4601 ASSERT(ic_data().HasOneTarget()); | 4594 ASSERT(ic_data().HasOneTarget()); |
4602 const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0)); | 4595 const Function& target = Function::ZoneHandle(ic_data().GetTargetAt(0)); |
4603 compiler->GenerateStaticCall(deopt_id(), | 4596 compiler->GenerateStaticCall(deopt_id(), |
4604 instance_call()->token_pos(), | 4597 instance_call()->token_pos(), |
4605 target, | 4598 target, |
4606 instance_call()->ArgumentCount(), | 4599 instance_call()->ArgumentCount(), |
4607 instance_call()->argument_names(), | 4600 instance_call()->argument_names(), |
4608 locs(), | 4601 locs(), |
(...skipping 23 matching lines...) Expand all Loading... |
4632 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, | 4625 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, |
4633 bool opt) const { | 4626 bool opt) const { |
4634 comparison()->InitializeLocationSummary(zone, opt); | 4627 comparison()->InitializeLocationSummary(zone, opt); |
4635 // Branches don't produce a result. | 4628 // Branches don't produce a result. |
4636 comparison()->locs()->set_out(0, Location::NoLocation()); | 4629 comparison()->locs()->set_out(0, Location::NoLocation()); |
4637 return comparison()->locs(); | 4630 return comparison()->locs(); |
4638 } | 4631 } |
4639 | 4632 |
4640 | 4633 |
// Emits the compare-and-branch by delegating to the comparison instruction,
// which branches directly to this branch's true/false successors.
void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ Comment("BranchInstr");
  comparison()->EmitBranchCode(compiler, this);
}
4645 | 4638 |
4646 | 4639 |
4647 LocationSummary* CheckClassInstr::MakeLocationSummary(Zone* zone, | 4640 LocationSummary* CheckClassInstr::MakeLocationSummary(Zone* zone, |
4648 bool opt) const { | 4641 bool opt) const { |
4649 const intptr_t kNumInputs = 1; | 4642 const intptr_t kNumInputs = 1; |
4650 const bool need_mask_temp = IsDenseSwitch() && !IsDenseMask(ComputeCidMask()); | 4643 const bool need_mask_temp = IsDenseSwitch() && !IsDenseMask(ComputeCidMask()); |
4651 const intptr_t kNumTemps = !IsNullCheck() ? (need_mask_temp ? 2 : 1) : 0; | 4644 const intptr_t kNumTemps = !IsNullCheck() ? (need_mask_temp ? 2 : 1) : 0; |
4652 LocationSummary* summary = new(zone) LocationSummary( | 4645 LocationSummary* summary = new(zone) LocationSummary( |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4734 const intptr_t kNumInputs = 1; | 4727 const intptr_t kNumInputs = 1; |
4735 const intptr_t kNumTemps = 0; | 4728 const intptr_t kNumTemps = 0; |
4736 LocationSummary* summary = new(zone) LocationSummary( | 4729 LocationSummary* summary = new(zone) LocationSummary( |
4737 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 4730 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
4738 summary->set_in(0, Location::RequiresRegister()); | 4731 summary->set_in(0, Location::RequiresRegister()); |
4739 return summary; | 4732 return summary; |
4740 } | 4733 } |
4741 | 4734 |
4742 | 4735 |
4743 void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 4736 void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
4744 __ TraceSimMsg("CheckSmiInstr"); | 4737 __ Comment("CheckSmiInstr"); |
4745 Register value = locs()->in(0).reg(); | 4738 Register value = locs()->in(0).reg(); |
4746 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 4739 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
4747 ICData::kDeoptCheckSmi, | 4740 ICData::kDeoptCheckSmi, |
4748 licm_hoisted_ ? ICData::kHoisted : 0); | 4741 licm_hoisted_ ? ICData::kHoisted : 0); |
4749 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); | 4742 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
4750 __ bne(CMPRES1, ZR, deopt); | 4743 __ bne(CMPRES1, ZR, deopt); |
4751 } | 4744 } |
4752 | 4745 |
4753 | 4746 |
4754 LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone, | 4747 LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone, |
(...skipping 633 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5388 } | 5381 } |
5389 | 5382 |
5390 | 5383 |
// An unconditional jump needs no inputs, temps, or result.
LocationSummary* GotoInstr::MakeLocationSummary(Zone* zone,
                                                bool opt) const {
  return new(zone) LocationSummary(zone, 0, 0, LocationSummary::kNoCall);
}
5395 | 5388 |
5396 | 5389 |
5397 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5390 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
5398 __ TraceSimMsg("GotoInstr"); | 5391 __ Comment("GotoInstr"); |
5399 if (!compiler->is_optimizing()) { | 5392 if (!compiler->is_optimizing()) { |
5400 if (FLAG_emit_edge_counters) { | 5393 if (FLAG_emit_edge_counters) { |
5401 compiler->EmitEdgeCounter(); | 5394 compiler->EmitEdgeCounter(); |
5402 } | 5395 } |
5403 // Add a deoptimization descriptor for deoptimizing instructions that | 5396 // Add a deoptimization descriptor for deoptimizing instructions that |
5404 // may be inserted before this instruction. On MIPS this descriptor | 5397 // may be inserted before this instruction. On MIPS this descriptor |
5405 // points after the edge counter code so that we can reuse the same | 5398 // points after the edge counter code so that we can reuse the same |
5406 // pattern matching code as at call sites, which matches backwards from | 5399 // pattern matching code as at call sites, which matches backwards from |
5407 // the end of the pattern. | 5400 // the end of the pattern. |
5408 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, | 5401 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5503 } | 5496 } |
5504 if (kind() != Token::kEQ_STRICT) { | 5497 if (kind() != Token::kEQ_STRICT) { |
5505 ASSERT(kind() == Token::kNE_STRICT); | 5498 ASSERT(kind() == Token::kNE_STRICT); |
5506 true_condition = NegateCondition(true_condition); | 5499 true_condition = NegateCondition(true_condition); |
5507 } | 5500 } |
5508 return true_condition; | 5501 return true_condition; |
5509 } | 5502 } |
5510 | 5503 |
5511 | 5504 |
5512 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5505 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
5513 __ TraceSimMsg("StrictCompareInstr"); | |
5514 __ Comment("StrictCompareInstr"); | 5506 __ Comment("StrictCompareInstr"); |
5515 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 5507 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
5516 | 5508 |
5517 Label is_true, is_false; | 5509 Label is_true, is_false; |
5518 BranchLabels labels = { &is_true, &is_false, &is_false }; | 5510 BranchLabels labels = { &is_true, &is_false, &is_false }; |
5519 Condition true_condition = EmitComparisonCode(compiler, labels); | 5511 Condition true_condition = EmitComparisonCode(compiler, labels); |
5520 EmitBranchOnCondition(compiler, true_condition, labels); | 5512 EmitBranchOnCondition(compiler, true_condition, labels); |
5521 | 5513 |
5522 Register result = locs()->out(0).reg(); | 5514 Register result = locs()->out(0).reg(); |
5523 Label done; | 5515 Label done; |
5524 __ Bind(&is_false); | 5516 __ Bind(&is_false); |
5525 __ LoadObject(result, Bool::False()); | 5517 __ LoadObject(result, Bool::False()); |
5526 __ b(&done); | 5518 __ b(&done); |
5527 __ Bind(&is_true); | 5519 __ Bind(&is_true); |
5528 __ LoadObject(result, Bool::True()); | 5520 __ LoadObject(result, Bool::True()); |
5529 __ Bind(&done); | 5521 __ Bind(&done); |
5530 } | 5522 } |
5531 | 5523 |
5532 | 5524 |
5533 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 5525 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
5534 BranchInstr* branch) { | 5526 BranchInstr* branch) { |
5535 __ TraceSimMsg("StrictCompareInstr::EmitBranchCode"); | 5527 __ Comment("StrictCompareInstr::EmitBranchCode"); |
5536 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 5528 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
5537 | 5529 |
5538 BranchLabels labels = compiler->CreateBranchLabels(branch); | 5530 BranchLabels labels = compiler->CreateBranchLabels(branch); |
5539 Condition true_condition = EmitComparisonCode(compiler, labels); | 5531 Condition true_condition = EmitComparisonCode(compiler, labels); |
5540 EmitBranchOnCondition(compiler, true_condition, labels); | 5532 EmitBranchOnCondition(compiler, true_condition, labels); |
5541 } | 5533 } |
5542 | 5534 |
5543 | 5535 |
5544 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone, | 5536 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone, |
5545 bool opt) const { | 5537 bool opt) const { |
(...skipping 15 matching lines...) Expand all Loading... |
5561 } | 5553 } |
5562 | 5554 |
5563 | 5555 |
5564 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, | 5556 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, |
5565 bool opt) const { | 5557 bool opt) const { |
5566 return MakeCallSummary(zone); | 5558 return MakeCallSummary(zone); |
5567 } | 5559 } |
5568 | 5560 |
5569 | 5561 |
5570 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5562 void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
5571 __ TraceSimMsg("AllocateObjectInstr"); | |
5572 __ Comment("AllocateObjectInstr"); | 5563 __ Comment("AllocateObjectInstr"); |
5573 Isolate* isolate = compiler->isolate(); | 5564 Isolate* isolate = compiler->isolate(); |
5574 StubCode* stub_code = isolate->stub_code(); | 5565 StubCode* stub_code = isolate->stub_code(); |
5575 const Code& stub = Code::Handle(isolate, | 5566 const Code& stub = Code::Handle(isolate, |
5576 stub_code->GetAllocationStubForClass(cls())); | 5567 stub_code->GetAllocationStubForClass(cls())); |
5577 const ExternalLabel label(stub.EntryPoint()); | 5568 const ExternalLabel label(stub.EntryPoint()); |
5578 compiler->GenerateCall(token_pos(), | 5569 compiler->GenerateCall(token_pos(), |
5579 &label, | 5570 &label, |
5580 RawPcDescriptors::kOther, | 5571 RawPcDescriptors::kOther, |
5581 locs()); | 5572 locs()); |
(...skipping 22 matching lines...) Expand all Loading... |
5604 zone, kNumInputs, kNumTemps, LocationSummary::kCall); | 5595 zone, kNumInputs, kNumTemps, LocationSummary::kCall); |
5605 locs->set_in(0, Location::RegisterLocation(T0)); | 5596 locs->set_in(0, Location::RegisterLocation(T0)); |
5606 locs->set_out(0, Location::RegisterLocation(T0)); | 5597 locs->set_out(0, Location::RegisterLocation(T0)); |
5607 return locs; | 5598 return locs; |
5608 } | 5599 } |
5609 | 5600 |
5610 | 5601 |
5611 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 5602 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
5612 const Register typed_data = locs()->in(0).reg(); | 5603 const Register typed_data = locs()->in(0).reg(); |
5613 const Register result = locs()->out(0).reg(); | 5604 const Register result = locs()->out(0).reg(); |
5614 __ TraceSimMsg("GrowRegExpStackInstr"); | 5605 __ Comment("GrowRegExpStackInstr"); |
5615 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 5606 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
5616 __ LoadObject(TMP, Object::null_object()); | 5607 __ LoadObject(TMP, Object::null_object()); |
5617 __ sw(TMP, Address(SP, 1 * kWordSize)); | 5608 __ sw(TMP, Address(SP, 1 * kWordSize)); |
5618 __ sw(typed_data, Address(SP, 0 * kWordSize)); | 5609 __ sw(typed_data, Address(SP, 0 * kWordSize)); |
5619 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. | 5610 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. |
5620 deopt_id(), | 5611 deopt_id(), |
5621 kGrowRegExpStackRuntimeEntry, | 5612 kGrowRegExpStackRuntimeEntry, |
5622 1, | 5613 1, |
5623 locs()); | 5614 locs()); |
5624 __ lw(result, Address(SP, 1 * kWordSize)); | 5615 __ lw(result, Address(SP, 1 * kWordSize)); |
5625 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 5616 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
5626 } | 5617 } |
5627 | 5618 |
5628 | 5619 |
5629 } // namespace dart | 5620 } // namespace dart |
5630 | 5621 |
5631 #endif // defined TARGET_ARCH_MIPS | 5622 #endif // defined TARGET_ARCH_MIPS |
OLD | NEW |