OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
(...skipping 149 matching lines...)
160 OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset, | 160 OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset, |
161 Register value, Register scratch0, Register scratch1, | 161 Register value, Register scratch0, Register scratch1, |
162 RecordWriteMode mode) | 162 RecordWriteMode mode) |
163 : OutOfLineCode(gen), | 163 : OutOfLineCode(gen), |
164 object_(object), | 164 object_(object), |
165 offset_(no_reg), | 165 offset_(no_reg), |
166 offset_immediate_(offset), | 166 offset_immediate_(offset), |
167 value_(value), | 167 value_(value), |
168 scratch0_(scratch0), | 168 scratch0_(scratch0), |
169 scratch1_(scratch1), | 169 scratch1_(scratch1), |
170 mode_(mode) {} | 170 mode_(mode), |
| 171 must_save_lr_(!gen->frame_access_state()->has_frame()) {} |
171 | 172 |
172 void Generate() final { | 173 void Generate() final { |
173 if (mode_ > RecordWriteMode::kValueIsPointer) { | 174 if (mode_ > RecordWriteMode::kValueIsPointer) { |
174 __ JumpIfSmi(value_, exit()); | 175 __ JumpIfSmi(value_, exit()); |
175 } | 176 } |
176 __ CheckPageFlag(value_, scratch0_, | 177 __ CheckPageFlag(value_, scratch0_, |
177 MemoryChunk::kPointersToHereAreInterestingMask, eq, | 178 MemoryChunk::kPointersToHereAreInterestingMask, eq, |
178 exit()); | 179 exit()); |
179 RememberedSetAction const remembered_set_action = | 180 RememberedSetAction const remembered_set_action = |
180 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET | 181 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET |
181 : OMIT_REMEMBERED_SET; | 182 : OMIT_REMEMBERED_SET; |
182 SaveFPRegsMode const save_fp_mode = | 183 SaveFPRegsMode const save_fp_mode = |
183 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; | 184 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; |
184 if (!frame()->needs_frame()) { | 185 if (must_save_lr_) { |
185 // We need to save and restore r14 if the frame was elided. | 186 // We need to save and restore r14 if the frame was elided. |
186 __ Push(r14); | 187 __ Push(r14); |
187 } | 188 } |
188 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, | 189 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, |
189 remembered_set_action, save_fp_mode); | 190 remembered_set_action, save_fp_mode); |
190 if (offset_.is(no_reg)) { | 191 if (offset_.is(no_reg)) { |
191 __ AddP(scratch1_, object_, Operand(offset_immediate_)); | 192 __ AddP(scratch1_, object_, Operand(offset_immediate_)); |
192 } else { | 193 } else { |
193 DCHECK_EQ(0, offset_immediate_); | 194 DCHECK_EQ(0, offset_immediate_); |
194 __ AddP(scratch1_, object_, offset_); | 195 __ AddP(scratch1_, object_, offset_); |
195 } | 196 } |
196 __ CallStub(&stub); | 197 __ CallStub(&stub); |
197 if (!frame()->needs_frame()) { | 198 if (must_save_lr_) { |
198 // We need to save and restore r14 if the frame was elided. | 199 // We need to save and restore r14 if the frame was elided. |
199 __ Pop(r14); | 200 __ Pop(r14); |
200 } | 201 } |
201 } | 202 } |
202 | 203 |
203 private: | 204 private: |
204 Register const object_; | 205 Register const object_; |
205 Register const offset_; | 206 Register const offset_; |
206 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). | 207 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). |
207 Register const value_; | 208 Register const value_; |
208 Register const scratch0_; | 209 Register const scratch0_; |
209 Register const scratch1_; | 210 Register const scratch1_; |
210 RecordWriteMode const mode_; | 211 RecordWriteMode const mode_; |
| 212 bool must_save_lr_; |
211 }; | 213 }; |
212 | 214 |
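The new must_save_lr_ flag above captures, at construction time, whether the surrounding code has a frame; only when the frame was elided does OutOfLineRecordWrite::Generate() need to preserve r14 (the return-address register on s390) around the RecordWriteStub call. A minimal standalone sketch of that pattern, using a hypothetical Emitter type rather than the real macro assembler:

    #include <cstdio>

    // Hypothetical stand-in for the macro assembler; not the V8 interface.
    struct Emitter {
      void Push(const char* reg)   { std::printf("  Push(%s)\n", reg); }
      void Pop(const char* reg)    { std::printf("  Pop(%s)\n", reg); }
      void CallStub(const char* s) { std::printf("  CallStub(%s)\n", s); }
    };

    // Mirrors the shape of OutOfLineRecordWrite::Generate(): r14 is saved and
    // restored around the stub call only when no frame was set up for us.
    void EmitWriteBarrierCall(Emitter& masm, bool has_frame) {
      const bool must_save_lr = !has_frame;  // decided once, like must_save_lr_
      if (must_save_lr) masm.Push("r14");
      masm.CallStub("RecordWriteStub");
      if (must_save_lr) masm.Pop("r14");
    }

    int main() {
      Emitter masm;
      std::puts("with frame:");
      EmitWriteBarrierCall(masm, /*has_frame=*/true);
      std::puts("frame elided:");
      EmitWriteBarrierCall(masm, /*has_frame=*/false);
      return 0;
    }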
213 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { | 215 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { |
214 switch (condition) { | 216 switch (condition) { |
215 case kEqual: | 217 case kEqual: |
216 return eq; | 218 return eq; |
217 case kNotEqual: | 219 case kNotEqual: |
218 return ne; | 220 return ne; |
219 case kSignedLessThan: | 221 case kSignedLessThan: |
220 case kUnsignedLessThan: | 222 case kUnsignedLessThan: |
(...skipping 332 matching lines...)
553 __ CmpLogical32(offset, i.InputRegister(2)); \ | 555 __ CmpLogical32(offset, i.InputRegister(2)); \ |
554 } else { \ | 556 } else { \ |
555 __ CmpLogical32(offset, i.InputImmediate(2)); \ | 557 __ CmpLogical32(offset, i.InputImmediate(2)); \ |
556 } \ | 558 } \ |
557 __ bge(&done); \ | 559 __ bge(&done); \ |
558 Register value = i.InputRegister(3); \ | 560 Register value = i.InputRegister(3); \ |
559 __ asm_instr(value, operand); \ | 561 __ asm_instr(value, operand); \ |
560 __ bind(&done); \ | 562 __ bind(&done); \ |
561 } while (0) | 563 } while (0) |
562 | 564 |
| 565 void CodeGenerator::AssembleDeconstructFrame() { |
| 566 __ LeaveFrame(StackFrame::MANUAL); |
| 567 } |
| 568 |
| 569 void CodeGenerator::AssembleSetupStackPointer() {} |
| 570 |
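AssembleDeconstructFrame() centralizes the frame teardown that the return paths further down previously inlined as LeaveFrame calls, and AssembleSetupStackPointer() is empty on s390 because the prologue itself positions sp. A toy, self-contained model of what deconstructing a standard frame amounts to (sp is reset from fp, then the saved fp and return address are restored); the slot order here is an illustrative assumption, not the documented s390 frame layout:

    #include <cstdint>
    #include <cstdio>

    // Toy machine: a downward-growing stack of 64-bit slots.
    constexpr int kSlots = 32;
    uint64_t g_stack[kSlots];
    int g_sp = kSlots;   // slot index; lower index == deeper stack
    int g_fp = kSlots;
    uint64_t g_lr = 0;

    void EnterFrame(uint64_t return_address) {
      g_stack[--g_sp] = return_address;               // save lr
      g_stack[--g_sp] = static_cast<uint64_t>(g_fp);  // save caller's fp
      g_fp = g_sp;                                    // fp -> saved-fp slot
    }

    void DeconstructFrame() {
      g_sp = g_fp;                                    // drop locals / spills
      g_fp = static_cast<int>(g_stack[g_sp++]);       // restore caller's fp
      g_lr = g_stack[g_sp++];                         // restore return address
    }

    int main() {
      EnterFrame(/*return_address=*/0x1234);
      g_stack[--g_sp] = 42;                           // pretend spill slot
      DeconstructFrame();
      std::printf("sp=%d fp=%d lr=0x%llx\n", g_sp, g_fp,
                  static_cast<unsigned long long>(g_lr));
      return 0;
    }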
563 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 571 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
564 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 572 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
565 if (sp_slot_delta > 0) { | 573 if (sp_slot_delta > 0) { |
566 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); | 574 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); |
567 } | 575 } |
568 frame_access_state()->SetFrameAccessToDefault(); | 576 frame_access_state()->SetFrameAccessToDefault(); |
569 } | 577 } |
570 | 578 |
571 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 579 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
572 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 580 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
573 if (sp_slot_delta < 0) { | 581 if (sp_slot_delta < 0) { |
574 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); | 582 __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize)); |
575 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 583 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
576 } | 584 } |
577 if (frame()->needs_frame()) { | 585 if (frame_access_state()->has_frame()) { |
578 __ RestoreFrameStateForTailCall(); | 586 __ RestoreFrameStateForTailCall(); |
579 } | 587 } |
580 frame_access_state()->SetFrameAccessToSP(); | 588 frame_access_state()->SetFrameAccessToSP(); |
581 } | 589 } |
582 | 590 |
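The two tail-call helpers above adjust sp by sp_slot_delta slots: a positive delta frees stack-parameter slots the callee does not need, while a negative delta reserves extra slots and records the shift via IncreaseSPDelta so later frame-relative accesses stay correct. A small worked example of the byte arithmetic, assuming 64-bit pointers (kPointerSize == 8) as on s390x:

    #include <cstdio>

    constexpr int kPointerSize = 8;  // assumption: 64-bit target (s390x)

    // Byte adjustment applied to sp for a given slot delta, mirroring
    //   __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize));
    constexpr int SpAdjustmentBytes(int sp_slot_delta) {
      return sp_slot_delta * kPointerSize;
    }

    int main() {
      // Caller passed 3 more stack slots than the tail callee needs: free 24 bytes.
      std::printf("delta +3 -> sp += %d bytes\n", SpAdjustmentBytes(3));
      // Tail callee needs 2 extra slots: sp moves down by 16 bytes.
      std::printf("delta -2 -> sp += %d bytes\n", SpAdjustmentBytes(-2));
      return 0;
    }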
583 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 591 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
584 Register scratch1, | 592 Register scratch1, |
585 Register scratch2, | 593 Register scratch2, |
586 Register scratch3) { | 594 Register scratch3) { |
587 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | 595 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); |
(...skipping 145 matching lines...)
733 case kArchRet: | 741 case kArchRet: |
734 AssembleReturn(); | 742 AssembleReturn(); |
735 break; | 743 break; |
736 case kArchStackPointer: | 744 case kArchStackPointer: |
737 __ LoadRR(i.OutputRegister(), sp); | 745 __ LoadRR(i.OutputRegister(), sp); |
738 break; | 746 break; |
739 case kArchFramePointer: | 747 case kArchFramePointer: |
740 __ LoadRR(i.OutputRegister(), fp); | 748 __ LoadRR(i.OutputRegister(), fp); |
741 break; | 749 break; |
742 case kArchParentFramePointer: | 750 case kArchParentFramePointer: |
743 if (frame_access_state()->frame()->needs_frame()) { | 751 if (frame_access_state()->has_frame()) { |
744 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); | 752 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); |
745 } else { | 753 } else { |
746 __ LoadRR(i.OutputRegister(), fp); | 754 __ LoadRR(i.OutputRegister(), fp); |
747 } | 755 } |
748 break; | 756 break; |
749 case kArchTruncateDoubleToI: | 757 case kArchTruncateDoubleToI: |
750 // TODO(mbrandy): move slow call to stub out of line. | 758 // TODO(mbrandy): move slow call to stub out of line. |
751 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); | 759 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); |
752 break; | 760 break; |
753 case kArchStoreWithWriteBarrier: { | 761 case kArchStoreWithWriteBarrier: { |
(...skipping 1019 matching lines...)
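The kArchParentFramePointer case above shows the frame-elision contract: when this code object set up a frame, the caller's fp lives in the first slot of that frame (MemOperand(fp, 0)); when the frame was elided, fp still holds the caller's value and is simply copied. A standalone sketch of that selection, with a hypothetical Frame record standing in for stack memory:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical frame record: slot 0 holds the saved caller fp.
    struct Frame {
      uintptr_t saved_fp;
    };

    // Mirrors the kArchParentFramePointer case: load from the current frame
    // if one exists, otherwise fp already is the parent's frame pointer.
    uintptr_t ParentFramePointer(uintptr_t fp, bool has_frame) {
      if (has_frame) {
        const Frame* frame = reinterpret_cast<const Frame*>(fp);
        return frame->saved_fp;  // __ LoadP(dst, MemOperand(fp, 0))
      }
      return fp;                 // __ LoadRR(dst, fp)
    }

    int main() {
      Frame f{0xCAFE};
      std::printf("framed: %llx\n", static_cast<unsigned long long>(
                      ParentFramePointer(reinterpret_cast<uintptr_t>(&f), true)));
      std::printf("elided: %llx\n", static_cast<unsigned long long>(
                      ParentFramePointer(0xCAFE, false)));
      return 0;
    }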
1773 isolate(), deoptimization_id, bailout_type); | 1781 isolate(), deoptimization_id, bailout_type); |
1774 // TODO(turbofan): We should be able to generate better code by sharing the | 1782 // TODO(turbofan): We should be able to generate better code by sharing the |
1775 // actual final call site and just bl'ing to it here, similar to what we do | 1783 // actual final call site and just bl'ing to it here, similar to what we do |
1776 // in the lithium backend. | 1784 // in the lithium backend. |
1777 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1785 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
1778 } | 1786 } |
1779 | 1787 |
1780 void CodeGenerator::AssemblePrologue() { | 1788 void CodeGenerator::AssemblePrologue() { |
1781 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1789 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
1782 | 1790 |
1783 if (frame()->needs_frame()) { | 1791 if (frame_access_state()->has_frame()) { |
1784 if (descriptor->IsCFunctionCall()) { | 1792 if (descriptor->IsCFunctionCall()) { |
1785 __ Push(r14, fp); | 1793 __ Push(r14, fp); |
1786 __ LoadRR(fp, sp); | 1794 __ LoadRR(fp, sp); |
1787 } else if (descriptor->IsJSFunctionCall()) { | 1795 } else if (descriptor->IsJSFunctionCall()) { |
1788 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); | 1796 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); |
1789 } else { | 1797 } else { |
1790 StackFrame::Type type = info()->GetOutputStackFrameType(); | 1798 StackFrame::Type type = info()->GetOutputStackFrameType(); |
1791 if (!ABI_CALL_VIA_IP && | 1799 // TODO(mbrandy): Detect cases where ip is the entrypoint (for |
1792 info()->output_code_kind() == Code::WASM_FUNCTION) { | 1800 // efficient initialization of the constant pool pointer register). |
1793 // TODO(mbrandy): Restrict only to the wasm wrapper case. | 1801 __ StubPrologue(type); |
1794 __ StubPrologue(type); | |
1795 } else { | |
1796 __ StubPrologue(type, ip); | |
1797 } | |
1798 } | 1802 } |
1799 } else { | |
1800 frame()->SetElidedFrameSizeInSlots(0); | |
1801 } | 1803 } |
1802 frame_access_state()->SetFrameAccessToDefault(); | |
1803 | 1804 |
1804 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1805 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
1805 if (info()->is_osr()) { | 1806 if (info()->is_osr()) { |
1806 // TurboFan OSR-compiled functions cannot be entered directly. | 1807 // TurboFan OSR-compiled functions cannot be entered directly. |
1807 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1808 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
1808 | 1809 |
1809 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1810 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
1810 // frame is still on the stack. Optimized code uses OSR values directly from | 1811 // frame is still on the stack. Optimized code uses OSR values directly from |
1811 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1812 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
1812 // remaining stack slots. | 1813 // remaining stack slots. |
(...skipping 41 matching lines...)
1854 __ MultiPop(saves); | 1855 __ MultiPop(saves); |
1855 } | 1856 } |
1856 | 1857 |
1857 // Restore double registers. | 1858 // Restore double registers. |
1858 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); | 1859 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); |
1859 if (double_saves != 0) { | 1860 if (double_saves != 0) { |
1860 __ MultiPopDoubles(double_saves); | 1861 __ MultiPopDoubles(double_saves); |
1861 } | 1862 } |
1862 | 1863 |
1863 if (descriptor->IsCFunctionCall()) { | 1864 if (descriptor->IsCFunctionCall()) { |
1864 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1865 AssembleDeconstructFrame(); |
1865 } else if (frame()->needs_frame()) { | 1866 } else if (frame_access_state()->has_frame()) { |
1866 // Canonicalize JSFunction return sites for now. | 1867 // Canonicalize JSFunction return sites for now. |
1867 if (return_label_.is_bound()) { | 1868 if (return_label_.is_bound()) { |
1868 __ b(&return_label_); | 1869 __ b(&return_label_); |
1869 return; | 1870 return; |
1870 } else { | 1871 } else { |
1871 __ bind(&return_label_); | 1872 __ bind(&return_label_); |
1872 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1873 AssembleDeconstructFrame(); |
1873 } | 1874 } |
1874 } else { | |
1875 __ Drop(pop_count); | |
1876 } | 1875 } |
1877 __ Ret(); | 1876 __ Ret(pop_count); |
1878 } | 1877 } |
1879 | 1878 |
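In the AssembleReturn path above, Ret(pop_count) now drops the stack parameters instead of folding pop_count into LeaveFrame, and the separate Drop(pop_count) branch for frameless code is gone because a frameless function has nothing of its own to tear down. Assuming Ret(n) simply frees n stack slots before returning, as on the other ports, the old and new sequences leave sp at the same place; a small check of that equivalence in slot arithmetic:

    #include <cassert>
    #include <cstdio>

    // Slot-level model: sp counts occupied stack slots above the caller's sp.
    // frame_slots = slots the frame teardown releases, pop_count = stack
    // parameters the return must also release. Values are illustrative.
    int OldSequence(int sp, int frame_slots, int pop_count) {
      sp -= frame_slots + pop_count;  // LeaveFrame(..., pop_count * kPointerSize)
      return sp;                      // Ret()
    }

    int NewSequence(int sp, int frame_slots, int pop_count) {
      sp -= frame_slots;              // AssembleDeconstructFrame()
      sp -= pop_count;                // Ret(pop_count)
      return sp;
    }

    int main() {
      const int sp = 10, frame_slots = 4, pop_count = 3;
      assert(OldSequence(sp, frame_slots, pop_count) ==
             NewSequence(sp, frame_slots, pop_count));
      std::printf("both end at sp = %d\n",
                  NewSequence(sp, frame_slots, pop_count));
      return 0;
    }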
1880 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1879 void CodeGenerator::AssembleMove(InstructionOperand* source, |
1881 InstructionOperand* destination) { | 1880 InstructionOperand* destination) { |
1882 S390OperandConverter g(this, nullptr); | 1881 S390OperandConverter g(this, nullptr); |
1883 // Dispatch on the source and destination operand kinds. Not all | 1882 // Dispatch on the source and destination operand kinds. Not all |
1884 // combinations are possible. | 1883 // combinations are possible. |
1885 if (source->IsRegister()) { | 1884 if (source->IsRegister()) { |
1886 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 1885 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
1887 Register src = g.ToRegister(source); | 1886 Register src = g.ToRegister(source); |
(...skipping 191 matching lines...)
2079 padding_size -= 2; | 2078 padding_size -= 2; |
2080 } | 2079 } |
2081 } | 2080 } |
2082 } | 2081 } |
2083 | 2082 |
2084 #undef __ | 2083 #undef __ |
2085 | 2084 |
2086 } // namespace compiler | 2085 } // namespace compiler |
2087 } // namespace internal | 2086 } // namespace internal |
2088 } // namespace v8 | 2087 } // namespace v8 |