OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm/macro-assembler-arm.h" | 7 #include "src/arm/macro-assembler-arm.h" |
8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
130 } | 130 } |
131 }; | 131 }; |
132 | 132 |
133 | 133 |
134 // Assembles an instruction after register allocation, producing machine code. | 134 // Assembles an instruction after register allocation, producing machine code. |
135 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 135 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
136 ArmOperandConverter i(this, instr); | 136 ArmOperandConverter i(this, instr); |
137 | 137 |
138 switch (ArchOpcodeField::decode(instr->opcode())) { | 138 switch (ArchOpcodeField::decode(instr->opcode())) { |
139 case kArchCallCodeObject: { | 139 case kArchCallCodeObject: { |
| 140 EnsureSpaceForLazyDeopt(); |
140 if (instr->InputAt(0)->IsImmediate()) { | 141 if (instr->InputAt(0)->IsImmediate()) { |
141 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 142 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
142 RelocInfo::CODE_TARGET); | 143 RelocInfo::CODE_TARGET); |
143 } else { | 144 } else { |
144 __ add(ip, i.InputRegister(0), | 145 __ add(ip, i.InputRegister(0), |
145 Operand(Code::kHeaderSize - kHeapObjectTag)); | 146 Operand(Code::kHeaderSize - kHeapObjectTag)); |
146 __ Call(ip); | 147 __ Call(ip); |
147 } | 148 } |
148 AddSafepointAndDeopt(instr); | 149 AddSafepointAndDeopt(instr); |
149 DCHECK_EQ(LeaveCC, i.OutputSBit()); | 150 DCHECK_EQ(LeaveCC, i.OutputSBit()); |
150 break; | 151 break; |
151 } | 152 } |
152 case kArchCallJSFunction: { | 153 case kArchCallJSFunction: { |
| 154 EnsureSpaceForLazyDeopt(); |
153 Register func = i.InputRegister(0); | 155 Register func = i.InputRegister(0); |
154 if (FLAG_debug_code) { | 156 if (FLAG_debug_code) { |
155 // Check the function's context matches the context argument. | 157 // Check the function's context matches the context argument. |
156 __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 158 __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
157 __ cmp(cp, kScratchReg); | 159 __ cmp(cp, kScratchReg); |
158 __ Assert(eq, kWrongFunctionContext); | 160 __ Assert(eq, kWrongFunctionContext); |
159 } | 161 } |
160 __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 162 __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
161 __ Call(ip); | 163 __ Call(ip); |
162 AddSafepointAndDeopt(instr); | 164 AddSafepointAndDeopt(instr); |
(...skipping 673 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
836 // No other combinations are possible. | 838 // No other combinations are possible. |
837 UNREACHABLE(); | 839 UNREACHABLE(); |
838 } | 840 } |
839 } | 841 } |
840 | 842 |
841 | 843 |
// Hook for targets that need nop padding around inlined Smi code; the ARM
// port intentionally emits nothing here.
void CodeGenerator::AddNopForSmiCodeInlining() {
  // On 32-bit ARM we do not insert nops for inlined Smi code.
}
845 | 847 |
| 848 |
// Pads the instruction stream with nops until the current pc is at least
// Deoptimizer::patch_size() bytes past the most recently marked lazy-deopt
// site (last_lazy_deopt_pc_), then records the current position as the new
// lazy-deopt site via MarkLazyDeoptSite().
// NOTE(review): the padding presumably guarantees that patching the previous
// lazy-bailout site cannot overwrite the code emitted here — confirm against
// Deoptimizer's lazy-deopt patching logic.
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  int space_needed = Deoptimizer::patch_size();
  // Stubs are exempt from the padding; presumably they are never lazily
  // deoptimized — TODO confirm.
  if (!linkage()->info()->IsStub()) {
    // Ensure that we have enough space after the previous lazy-bailout
    // instruction for patching the code here.
    int current_pc = masm()->pc_offset();
    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
      // Block literal pool emission for duration of padding. A constant pool
      // dumped mid-padding would break the fixed-size-gap invariant the
      // DCHECK below relies on.
      v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
      // On ARM every emitted instruction is kInstrSize bytes, so the gap to
      // fill must be an exact multiple of the nop size.
      DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
      while (padding_size > 0) {
        __ nop();
        padding_size -= v8::internal::Assembler::kInstrSize;
      }
    }
  }
  // Record this pc as the new lazy-deopt site for the next call.
  MarkLazyDeoptSite();
}
| 868 |
846 #undef __ | 869 #undef __ |
847 | 870 |
848 } // namespace compiler | 871 } // namespace compiler |
849 } // namespace internal | 872 } // namespace internal |
850 } // namespace v8 | 873 } // namespace v8 |
OLD | NEW |