Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
| 8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
| 9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
| 10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
| (...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 180 OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset, | 180 OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset, |
| 181 Register value, Register scratch0, Register scratch1, | 181 Register value, Register scratch0, Register scratch1, |
| 182 RecordWriteMode mode) | 182 RecordWriteMode mode) |
| 183 : OutOfLineCode(gen), | 183 : OutOfLineCode(gen), |
| 184 object_(object), | 184 object_(object), |
| 185 offset_(no_reg), | 185 offset_(no_reg), |
| 186 offset_immediate_(offset), | 186 offset_immediate_(offset), |
| 187 value_(value), | 187 value_(value), |
| 188 scratch0_(scratch0), | 188 scratch0_(scratch0), |
| 189 scratch1_(scratch1), | 189 scratch1_(scratch1), |
| 190 mode_(mode) {} | 190 mode_(mode), |
| 191 must_save_lr_(!gen->frame_access_state()->has_frame()) {} | |
| 191 | 192 |
| 192 void Generate() final { | 193 void Generate() final { |
| 193 if (mode_ > RecordWriteMode::kValueIsPointer) { | 194 if (mode_ > RecordWriteMode::kValueIsPointer) { |
| 194 __ JumpIfSmi(value_, exit()); | 195 __ JumpIfSmi(value_, exit()); |
| 195 } | 196 } |
| 196 __ CheckPageFlag(value_, scratch0_, | 197 __ CheckPageFlag(value_, scratch0_, |
| 197 MemoryChunk::kPointersToHereAreInterestingMask, eq, | 198 MemoryChunk::kPointersToHereAreInterestingMask, eq, |
| 198 exit()); | 199 exit()); |
| 199 RememberedSetAction const remembered_set_action = | 200 RememberedSetAction const remembered_set_action = |
| 200 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET | 201 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET |
| 201 : OMIT_REMEMBERED_SET; | 202 : OMIT_REMEMBERED_SET; |
| 202 SaveFPRegsMode const save_fp_mode = | 203 SaveFPRegsMode const save_fp_mode = |
| 203 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; | 204 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs; |
| 204 if (!frame()->needs_frame()) { | 205 if (must_save_lr_) { |
| 205 // We need to save and restore lr if the frame was elided. | 206 // We need to save and restore lr if the frame was elided. |
| 206 __ mflr(scratch1_); | 207 __ mflr(scratch1_); |
| 207 __ Push(scratch1_); | 208 __ Push(scratch1_); |
| 208 } | 209 } |
| 209 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, | 210 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_, |
| 210 remembered_set_action, save_fp_mode); | 211 remembered_set_action, save_fp_mode); |
| 211 if (offset_.is(no_reg)) { | 212 if (offset_.is(no_reg)) { |
| 212 __ addi(scratch1_, object_, Operand(offset_immediate_)); | 213 __ addi(scratch1_, object_, Operand(offset_immediate_)); |
| 213 } else { | 214 } else { |
| 214 DCHECK_EQ(0, offset_immediate_); | 215 DCHECK_EQ(0, offset_immediate_); |
| 215 __ add(scratch1_, object_, offset_); | 216 __ add(scratch1_, object_, offset_); |
| 216 } | 217 } |
| 217 __ CallStub(&stub); | 218 __ CallStub(&stub); |
| 218 if (!frame()->needs_frame()) { | 219 if (must_save_lr_) { |
| 219 // We need to save and restore lr if the frame was elided. | 220 // We need to save and restore lr if the frame was elided. |
| 220 __ Pop(scratch1_); | 221 __ Pop(scratch1_); |
| 221 __ mtlr(scratch1_); | 222 __ mtlr(scratch1_); |
| 222 } | 223 } |
| 223 } | 224 } |
| 224 | 225 |
| 225 private: | 226 private: |
| 226 Register const object_; | 227 Register const object_; |
| 227 Register const offset_; | 228 Register const offset_; |
| 228 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). | 229 int32_t const offset_immediate_; // Valid if offset_.is(no_reg). |
| 229 Register const value_; | 230 Register const value_; |
| 230 Register const scratch0_; | 231 Register const scratch0_; |
| 231 Register const scratch1_; | 232 Register const scratch1_; |
| 232 RecordWriteMode const mode_; | 233 RecordWriteMode const mode_; |
| 234 bool must_save_lr_; | |
| 233 }; | 235 }; |
| 234 | 236 |
| 235 | 237 |
| 236 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { | 238 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { |
| 237 switch (condition) { | 239 switch (condition) { |
| 238 case kEqual: | 240 case kEqual: |
| 239 return eq; | 241 return eq; |
| 240 case kNotEqual: | 242 case kNotEqual: |
| 241 return ne; | 243 return ne; |
| 242 case kSignedLessThan: | 244 case kSignedLessThan: |
| (...skipping 420 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 663 Register value = i.InputRegister(3); \ | 665 Register value = i.InputRegister(3); \ |
| 664 if (mode == kMode_MRI) { \ | 666 if (mode == kMode_MRI) { \ |
| 665 __ asm_instr(value, operand); \ | 667 __ asm_instr(value, operand); \ |
| 666 } else { \ | 668 } else { \ |
| 667 __ asm_instrx(value, operand); \ | 669 __ asm_instrx(value, operand); \ |
| 668 } \ | 670 } \ |
| 669 __ bind(&done); \ | 671 __ bind(&done); \ |
| 670 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ | 672 DCHECK_EQ(LeaveRC, i.OutputRCBit()); \ |
| 671 } while (0) | 673 } while (0) |
| 672 | 674 |
| 675 void CodeGenerator::AssembleDeconstructFrame() { | |
| 676 __ LeaveFrame(StackFrame::MANUAL); | |
| 677 } | |
| 678 | |
| 679 void CodeGenerator::AssembleSetupStackPointer() {} | |
| 673 | 680 |
| 674 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { | 681 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) { |
| 675 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 682 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 676 if (sp_slot_delta > 0) { | 683 if (sp_slot_delta > 0) { |
| 677 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); | 684 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); |
| 678 } | 685 } |
| 679 frame_access_state()->SetFrameAccessToDefault(); | 686 frame_access_state()->SetFrameAccessToDefault(); |
| 680 } | 687 } |
| 681 | 688 |
| 682 | 689 |
| 683 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { | 690 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) { |
| 684 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); | 691 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta); |
| 685 if (sp_slot_delta < 0) { | 692 if (sp_slot_delta < 0) { |
| 686 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); | 693 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0); |
| 687 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); | 694 frame_access_state()->IncreaseSPDelta(-sp_slot_delta); |
| 688 } | 695 } |
| 689 if (frame()->needs_frame()) { | 696 if (frame_access_state()->has_frame()) { |
| 690 __ RestoreFrameStateForTailCall(); | 697 __ RestoreFrameStateForTailCall(); |
| 691 } | 698 } |
| 692 frame_access_state()->SetFrameAccessToSP(); | 699 frame_access_state()->SetFrameAccessToSP(); |
| 693 } | 700 } |
| 694 | 701 |
| 695 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, | 702 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg, |
| 696 Register scratch1, | 703 Register scratch1, |
| 697 Register scratch2, | 704 Register scratch2, |
| 698 Register scratch3) { | 705 Register scratch3) { |
| 699 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | 706 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); |
| (...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 860 break; | 867 break; |
| 861 case kArchStackPointer: | 868 case kArchStackPointer: |
| 862 __ mr(i.OutputRegister(), sp); | 869 __ mr(i.OutputRegister(), sp); |
| 863 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 870 DCHECK_EQ(LeaveRC, i.OutputRCBit()); |
| 864 break; | 871 break; |
| 865 case kArchFramePointer: | 872 case kArchFramePointer: |
| 866 __ mr(i.OutputRegister(), fp); | 873 __ mr(i.OutputRegister(), fp); |
| 867 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 874 DCHECK_EQ(LeaveRC, i.OutputRCBit()); |
| 868 break; | 875 break; |
| 869 case kArchParentFramePointer: | 876 case kArchParentFramePointer: |
| 870 if (frame_access_state()->frame()->needs_frame()) { | 877 if (frame_access_state()->has_frame()) { |
| 871 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); | 878 __ LoadP(i.OutputRegister(), MemOperand(fp, 0)); |
| 872 } else { | 879 } else { |
| 873 __ mr(i.OutputRegister(), fp); | 880 __ mr(i.OutputRegister(), fp); |
| 874 } | 881 } |
| 875 break; | 882 break; |
| 876 case kArchTruncateDoubleToI: | 883 case kArchTruncateDoubleToI: |
| 877 // TODO(mbrandy): move slow call to stub out of line. | 884 // TODO(mbrandy): move slow call to stub out of line. |
| 878 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); | 885 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0)); |
| 879 DCHECK_EQ(LeaveRC, i.OutputRCBit()); | 886 DCHECK_EQ(LeaveRC, i.OutputRCBit()); |
| 880 break; | 887 break; |
| (...skipping 807 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1688 isolate(), deoptimization_id, bailout_type); | 1695 isolate(), deoptimization_id, bailout_type); |
| 1689 // TODO(turbofan): We should be able to generate better code by sharing the | 1696 // TODO(turbofan): We should be able to generate better code by sharing the |
| 1690 // actual final call site and just bl'ing to it here, similar to what we do | 1697 // actual final call site and just bl'ing to it here, similar to what we do |
| 1691 // in the lithium backend. | 1698 // in the lithium backend. |
| 1692 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); | 1699 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY); |
| 1693 } | 1700 } |
| 1694 | 1701 |
| 1695 | 1702 |
| 1696 void CodeGenerator::AssemblePrologue() { | 1703 void CodeGenerator::AssemblePrologue() { |
| 1697 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1704 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1698 if (frame()->needs_frame()) { | 1705 if (frame_access_state()->has_frame()) { |
| 1699 if (descriptor->IsCFunctionCall()) { | 1706 if (descriptor->IsCFunctionCall()) { |
| 1700 __ function_descriptor(); | 1707 __ function_descriptor(); |
| 1701 __ mflr(r0); | 1708 __ mflr(r0); |
| 1702 if (FLAG_enable_embedded_constant_pool) { | 1709 if (FLAG_enable_embedded_constant_pool) { |
| 1703 __ Push(r0, fp, kConstantPoolRegister); | 1710 __ Push(r0, fp, kConstantPoolRegister); |
| 1704 // Adjust FP to point to saved FP. | 1711 // Adjust FP to point to saved FP. |
| 1705 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); | 1712 __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); |
| 1706 } else { | 1713 } else { |
| 1707 __ Push(r0, fp); | 1714 __ Push(r0, fp); |
| 1708 __ mr(fp, sp); | 1715 __ mr(fp, sp); |
| 1709 } | 1716 } |
| 1710 } else if (descriptor->IsJSFunctionCall()) { | 1717 } else if (descriptor->IsJSFunctionCall()) { |
| 1711 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); | 1718 __ Prologue(this->info()->GeneratePreagedPrologue(), ip); |
| 1712 } else { | 1719 } else { |
| 1713 StackFrame::Type type = info()->GetOutputStackFrameType(); | 1720 StackFrame::Type type = info()->GetOutputStackFrameType(); |
| 1714 if (!ABI_CALL_VIA_IP && | 1721 // TODO(mbrandy): Detect cases where ip is the entrypoint (for |
| 1715 info()->output_code_kind() == Code::WASM_FUNCTION) { | 1722 // efficient initialization of the constant pool pointer register). |
| 1716 // TODO(mbrandy): Restrict only to the wasm wrapper case. | 1723 __ StubPrologue(type); |
| 1717 __ StubPrologue(type); | |
| 1718 } else { | |
| 1719 __ StubPrologue(type, ip); | |
| 1720 } | |
| 1721 } | 1724 } |
| 1722 } else { | |
| 1723 frame()->SetElidedFrameSizeInSlots(0); | |
| 1724 } | 1725 } |
| 1725 frame_access_state()->SetFrameAccessToDefault(); | |
| 1726 | 1726 |
| 1727 int stack_shrink_slots = frame()->GetSpillSlotCount(); | 1727 int stack_shrink_slots = frame()->GetSpillSlotCount(); |
| 1728 if (info()->is_osr()) { | 1728 if (info()->is_osr()) { |
| 1729 // TurboFan OSR-compiled functions cannot be entered directly. | 1729 // TurboFan OSR-compiled functions cannot be entered directly. |
| 1730 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1730 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
| 1731 | 1731 |
| 1732 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1732 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
| 1733 // frame is still on the stack. Optimized code uses OSR values directly from | 1733 // frame is still on the stack. Optimized code uses OSR values directly from |
| 1734 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1734 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
| 1735 // remaining stack slots. | 1735 // remaining stack slots. |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1784 __ MultiPop(saves); | 1784 __ MultiPop(saves); |
| 1785 } | 1785 } |
| 1786 | 1786 |
| 1787 // Restore double registers. | 1787 // Restore double registers. |
| 1788 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); | 1788 const RegList double_saves = descriptor->CalleeSavedFPRegisters(); |
| 1789 if (double_saves != 0) { | 1789 if (double_saves != 0) { |
| 1790 __ MultiPopDoubles(double_saves); | 1790 __ MultiPopDoubles(double_saves); |
| 1791 } | 1791 } |
| 1792 | 1792 |
| 1793 if (descriptor->IsCFunctionCall()) { | 1793 if (descriptor->IsCFunctionCall()) { |
| 1794 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1794 AssembleDeconstructFrame(); |
| 1795 } else if (frame()->needs_frame()) { | 1795 } else if (frame_access_state()->has_frame()) { |
| 1796 // Canonicalize JSFunction return sites for now. | 1796 // Canonicalize JSFunction return sites for now. |
| 1797 if (return_label_.is_bound()) { | 1797 if (return_label_.is_bound()) { |
| 1798 __ b(&return_label_); | 1798 __ b(&return_label_); |
| 1799 return; | 1799 return; |
| 1800 } else { | 1800 } else { |
| 1801 __ bind(&return_label_); | 1801 __ bind(&return_label_); |
| 1802 __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize); | 1802 AssembleDeconstructFrame(); |
| 1803 } | 1803 } |
| 1804 } else { | |
| 1805 __ Drop(pop_count); | |
|
Mircea Trofin
2016/03/30 22:39:24
What happened to the pop_count?
MTBrandyberry
2016/03/30 22:57:47
Since we can no longer can pass it via AssembleDec
| |
| 1806 } | 1804 } |
| 1807 __ Ret(); | 1805 __ Ret(pop_count); |
| 1808 } | 1806 } |
| 1809 | 1807 |
| 1810 | 1808 |
| 1811 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1809 void CodeGenerator::AssembleMove(InstructionOperand* source, |
| 1812 InstructionOperand* destination) { | 1810 InstructionOperand* destination) { |
| 1813 PPCOperandConverter g(this, nullptr); | 1811 PPCOperandConverter g(this, nullptr); |
| 1814 // Dispatch on the source and destination operand kinds. Not all | 1812 // Dispatch on the source and destination operand kinds. Not all |
| 1815 // combinations are possible. | 1813 // combinations are possible. |
| 1816 if (source->IsRegister()) { | 1814 if (source->IsRegister()) { |
| 1817 DCHECK(destination->IsRegister() || destination->IsStackSlot()); | 1815 DCHECK(destination->IsRegister() || destination->IsStackSlot()); |
| (...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2011 padding_size -= v8::internal::Assembler::kInstrSize; | 2009 padding_size -= v8::internal::Assembler::kInstrSize; |
| 2012 } | 2010 } |
| 2013 } | 2011 } |
| 2014 } | 2012 } |
| 2015 | 2013 |
| 2016 #undef __ | 2014 #undef __ |
| 2017 | 2015 |
| 2018 } // namespace compiler | 2016 } // namespace compiler |
| 2019 } // namespace internal | 2017 } // namespace internal |
| 2020 } // namespace v8 | 2018 } // namespace v8 |
| OLD | NEW |