OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm64/macro-assembler-arm64.h" | 7 #include "src/arm64/macro-assembler-arm64.h" |
8 #include "src/compiler/code-generator-impl.h" | 8 #include "src/compiler/code-generator-impl.h" |
9 #include "src/compiler/gap-resolver.h" | 9 #include "src/compiler/gap-resolver.h" |
10 #include "src/compiler/node-matchers.h" | 10 #include "src/compiler/node-matchers.h" |
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
130 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { | 130 void CodeGenerator::AssembleArchInstruction(Instruction* instr) { |
131 Arm64OperandConverter i(this, instr); | 131 Arm64OperandConverter i(this, instr); |
132 InstructionCode opcode = instr->opcode(); | 132 InstructionCode opcode = instr->opcode(); |
133 switch (ArchOpcodeField::decode(opcode)) { | 133 switch (ArchOpcodeField::decode(opcode)) { |
134 case kArchCallAddress: { | 134 case kArchCallAddress: { |
135 DirectCEntryStub stub(isolate()); | 135 DirectCEntryStub stub(isolate()); |
136 stub.GenerateCall(masm(), i.InputRegister(0)); | 136 stub.GenerateCall(masm(), i.InputRegister(0)); |
137 break; | 137 break; |
138 } | 138 } |
139 case kArchCallCodeObject: { | 139 case kArchCallCodeObject: { |
140 EnsureSpaceForLazyDeopt(); | |
140 if (instr->InputAt(0)->IsImmediate()) { | 141 if (instr->InputAt(0)->IsImmediate()) { |
141 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), | 142 __ Call(Handle<Code>::cast(i.InputHeapObject(0)), |
142 RelocInfo::CODE_TARGET); | 143 RelocInfo::CODE_TARGET); |
143 } else { | 144 } else { |
144 Register target = i.InputRegister(0); | 145 Register target = i.InputRegister(0); |
145 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); | 146 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag); |
146 __ Call(target); | 147 __ Call(target); |
147 } | 148 } |
148 AddSafepointAndDeopt(instr); | 149 AddSafepointAndDeopt(instr); |
149 break; | 150 break; |
150 } | 151 } |
151 case kArchCallJSFunction: { | 152 case kArchCallJSFunction: { |
153 EnsureSpaceForLazyDeopt(); | |
152 Register func = i.InputRegister(0); | 154 Register func = i.InputRegister(0); |
153 if (FLAG_debug_code) { | 155 if (FLAG_debug_code) { |
154 // Check the function's context matches the context argument. | 156 // Check the function's context matches the context argument. |
155 UseScratchRegisterScope scope(masm()); | 157 UseScratchRegisterScope scope(masm()); |
156 Register temp = scope.AcquireX(); | 158 Register temp = scope.AcquireX(); |
157 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); | 159 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset)); |
158 __ cmp(cp, temp); | 160 __ cmp(cp, temp); |
159 __ Assert(eq, kWrongFunctionContext); | 161 __ Assert(eq, kWrongFunctionContext); |
160 } | 162 } |
161 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 163 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
(...skipping 685 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
847 } | 849 } |
848 } else { | 850 } else { |
849 // No other combinations are possible. | 851 // No other combinations are possible. |
850 UNREACHABLE(); | 852 UNREACHABLE(); |
851 } | 853 } |
852 } | 854 } |
853 | 855 |
854 | 856 |
855 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); } | 857 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); } |
856 | 858 |
859 | |
860 void CodeGenerator::EnsureSpaceForLazyDeopt() { | |
861 int space_needed = Deoptimizer::patch_size(); | |
862 if (!linkage()->info()->IsStub()) { | |
863 // Ensure that we have enough space after the previous lazy-bailout | |
864 // instruction for patching the code here. | |
865 intptr_t current_pc = masm()->pc_offset(); | |
866 | |
867 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) { | |
868 ptrdiff_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | |
Benedikt Meurer
2014/09/22 07:23:37
ptrdiff_t is the wrong type here — padding_size is a small byte count bounded by Deoptimizer::patch_size(), not a pointer difference — so plain int suffices.
| |
869 DCHECK((padding_size % kInstructionSize) == 0); | |
870 InstructionAccurateScope instruction_accurate( | |
871 masm(), padding_size / kInstructionSize); | |
872 | |
873 while (padding_size > 0) { | |
874 __ nop(); | |
875 padding_size -= kInstructionSize; | |
876 } | |
877 } | |
878 } | |
879 MarkLazyDeoptSite(); | |
880 } | |
881 | |
857 #undef __ | 882 #undef __ |
858 | 883 |
859 } // namespace compiler | 884 } // namespace compiler |
860 } // namespace internal | 885 } // namespace internal |
861 } // namespace v8 | 886 } // namespace v8 |
OLD | NEW |