OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm64/macro-assembler-arm64.h" | 7 #include "src/arm64/macro-assembler-arm64.h" |
8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
(...skipping 196 matching lines...) |
207 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 207 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
208 __ cmp(cp, temp); | 208 __ cmp(cp, temp); |
209 __ Assert(eq, kWrongFunctionContext); | 209 __ Assert(eq, kWrongFunctionContext); |
210 } | 210 } |
211 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 211 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
212 __ Call(x10); | 212 __ Call(x10); |
213 AddSafepointAndDeopt(instr); | 213 AddSafepointAndDeopt(instr); |
214 break; | 214 break; |
215 } | 215 } |
216 case kArchJmp: | 216 case kArchJmp: |
217 __ B(GetLabel(i.InputRpo(0))); | 217 AssembleArchJump(i.InputRpo(0)); |
218 break; | 218 break; |
219 case kArchNop: | 219 case kArchNop: |
220 // don't emit code for nops. | 220 // don't emit code for nops. |
221 break; | 221 break; |
222 case kArchRet: | 222 case kArchRet: |
223 AssembleReturn(); | 223 AssembleReturn(); |
224 break; | 224 break; |
225 case kArchStackPointer: | 225 case kArchStackPointer: |
226 __ mov(i.OutputRegister(), masm()->StackPointer()); | 226 __ mov(i.OutputRegister(), masm()->StackPointer()); |
227 break; | 227 break; |
(...skipping 377 matching lines...) |
605 if (csp.is(masm()->StackPointer())) { | 605 if (csp.is(masm()->StackPointer())) { |
606 __ Pop(temp, lr); | 606 __ Pop(temp, lr); |
607 } | 607 } |
608 break; | 608 break; |
609 } | 609 } |
610 } | 610 } |
611 } | 611 } |
612 | 612 |
613 | 613 |
614 // Assemble branches after this instruction. | 614 // Assemble branches after this instruction. |
615 void CodeGenerator::AssembleArchBranch(Instruction* instr, | 615 void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { |
616 FlagsCondition condition) { | |
617 Arm64OperandConverter i(this, instr); | 616 Arm64OperandConverter i(this, instr); |
618 Label done; | 617 Label* tlabel = branch->true_label; |
619 | 618 Label* flabel = branch->false_label; |
620 // Emit a branch. The true and false targets are always the last two inputs | 619 switch (branch->condition) { |
621 // to the instruction. | |
622 BasicBlock::RpoNumber tblock = | |
623 i.InputRpo(static_cast<int>(instr->InputCount()) - 2); | |
624 BasicBlock::RpoNumber fblock = | |
625 i.InputRpo(static_cast<int>(instr->InputCount()) - 1); | |
626 bool fallthru = IsNextInAssemblyOrder(fblock); | |
627 Label* tlabel = GetLabel(tblock); | |
628 Label* flabel = fallthru ? &done : GetLabel(fblock); | |
629 switch (condition) { | |
630 case kUnorderedEqual: | 620 case kUnorderedEqual: |
631 __ B(vs, flabel); | 621 __ B(vs, flabel); |
632 // Fall through. | 622 // Fall through. |
633 case kEqual: | 623 case kEqual: |
634 __ B(eq, tlabel); | 624 __ B(eq, tlabel); |
635 break; | 625 break; |
636 case kUnorderedNotEqual: | 626 case kUnorderedNotEqual: |
637 __ B(vs, tlabel); | 627 __ B(vs, tlabel); |
638 // Fall through. | 628 // Fall through. |
639 case kNotEqual: | 629 case kNotEqual: |
(...skipping 35 matching lines...) |
675 case kUnsignedGreaterThan: | 665 case kUnsignedGreaterThan: |
676 __ B(hi, tlabel); | 666 __ B(hi, tlabel); |
677 break; | 667 break; |
678 case kOverflow: | 668 case kOverflow: |
679 __ B(vs, tlabel); | 669 __ B(vs, tlabel); |
680 break; | 670 break; |
681 case kNotOverflow: | 671 case kNotOverflow: |
682 __ B(vc, tlabel); | 672 __ B(vc, tlabel); |
683 break; | 673 break; |
684 } | 674 } |
685 if (!fallthru) __ B(flabel); // no fallthru to flabel. | 675 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel. |
686 __ Bind(&done); | |
687 } | 676 } |
688 | 677 |
689 | 678 |
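Note: AssembleArchBranch now takes a BranchInfo* rather than a bare FlagsCondition, so the shared code generator can compute the branch targets and the fall-through case once for every backend instead of each backend repeating it. A minimal sketch of what that struct presumably carries, inferred from the fields the rewritten function reads above (the authoritative definition lives in src/compiler/code-generator.h):

    // Sketch only: fields inferred from branch->condition, branch->true_label,
    // branch->false_label and branch->fallthru as used in AssembleArchBranch.
    struct BranchInfo {
      FlagsCondition condition;  // flags condition to branch on
      Label* true_label;         // target when the condition holds
      Label* false_label;        // target otherwise
      bool fallthru;             // false_label is the next block in assembly
                                 // order, so the trailing B can be elided
    };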
| 679 void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { |
| 680 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target)); |
| 681 } |
| 682 |
| 683 |
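Note: the target extraction and fall-through test deleted from AssembleArchBranch (the true and false blocks are always the last two instruction inputs) presumably move to the platform-independent caller. A hedged sketch of that caller follows; AssembleBranchHelper is a hypothetical name, but the input layout and the fall-through test are copied from the removed lines:

    // Hypothetical shared-code wrapper: builds a BranchInfo from the
    // instruction's last two inputs and hands it to the arch backend.
    void CodeGenerator::AssembleBranchHelper(Instruction* instr,
                                             FlagsCondition condition) {
      InstructionOperandConverter i(this, instr);
      BasicBlock::RpoNumber tblock =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
      BasicBlock::RpoNumber fblock =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(tblock);
      branch.false_label = GetLabel(fblock);
      branch.fallthru = IsNextInAssemblyOrder(fblock);
      AssembleArchBranch(instr, &branch);  // arch-specific emission
    }

With that split, AssembleArchJump above can drop the branch entirely whenever the target block is next in assembly order, which the old per-backend code handled with the local done label and an explicit fallthru flag.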
690 // Assemble boolean materializations after this instruction. | 684 // Assemble boolean materializations after this instruction. |
691 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 685 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
692 FlagsCondition condition) { | 686 FlagsCondition condition) { |
693 Arm64OperandConverter i(this, instr); | 687 Arm64OperandConverter i(this, instr); |
694 Label done; | 688 Label done; |
695 | 689 |
696 // Materialize a full 64-bit 1 or 0 value. The result register is always the | 690 // Materialize a full 64-bit 1 or 0 value. The result register is always the |
697 // last output of the instruction. | 691 // last output of the instruction. |
698 Label check; | 692 Label check; |
699 DCHECK_NE(0, instr->OutputCount()); | 693 DCHECK_NE(0, instr->OutputCount()); |
(...skipping 313 matching lines...) |
1013 } | 1007 } |
1014 } | 1008 } |
1015 MarkLazyDeoptSite(); | 1009 MarkLazyDeoptSite(); |
1016 } | 1010 } |
1017 | 1011 |
1018 #undef __ | 1012 #undef __ |
1019 | 1013 |
1020 } // namespace compiler | 1014 } // namespace compiler |
1021 } // namespace internal | 1015 } // namespace internal |
1022 } // namespace v8 | 1016 } // namespace v8 |