// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS64_LITHIUM_CODEGEN_MIPS64_H_
#define V8_MIPS64_LITHIUM_CODEGEN_MIPS64_H_

#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/mips64/lithium-gap-resolver-mips64.h"
#include "src/mips64/lithium-mips64.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

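  // An eager frame is set up in the prologue when the code uses stack slots,
  // makes non-deferred calls, is not a stub, or otherwise requires a frame;
  // otherwise a frame is only built lazily around deferred calls.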
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

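  // Whether the return address (ra) has already been saved to the frame,
  // as required by the write-barrier helpers.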
  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int64_t ToRepresentation_donotuse(LConstantOperand* op,
                                    const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

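  // Deferred code entry points: called from the out-of-line code emitted
  // for the corresponding instructions (see LDeferredCode below).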
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

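  // Computes the memory operand for a keyed load or store from the key
  // (constant or register), the element size shift and the base offset.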
  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  LanguageMode language_mode() const { return info()->language_mode(); }

  Scope* scope() const { return scope_; }

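  // Scratch registers reserved for Lithium code generation.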
  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

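  // Spill and reload the caller-saved double registers when the compiled
  // code is expected to preserve them.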
  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Each pass returns true if code generation
  // should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

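  // Controls how the safepoint for a call is recorded.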
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

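  // Call helpers that also record the safepoint and lazy-deoptimization
  // information required after the call.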
  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function. Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
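  // Emits a deoptimization check: if 'condition' holds for src1 and src2,
  // execution bails out to the deoptimizer with the given reason.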
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(
      Condition condition, LInstruction* instr,
      Deoptimizer::DeoptReason deopt_reason = Deoptimizer::kNoReason,
      Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg));

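  // Appends a description of 'op' to the deoptimization translation for
  // the given environment.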
  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

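  // Fast paths for Math.abs on untagged integers and Smis.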
  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

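  // Maps a comparison token to the branch condition to use, taking
  // signedness into account.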
  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
                      const Operand& src2);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough. Also returns, in cmp1
  // and cmp2, the operands to use in the Branch instruction that follows
  // EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

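  // Set up the type feedback vector and slot registers expected by the
  // vector-based load/store ICs.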
  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

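  // While this scope is active all registers are saved via
  // StoreRegistersStateStub, so safepoints recorded inside it must use
  // Safepoint::kWithRegisters. Typical use in deferred code:
  //   PushSafepointRegistersScope scope(this);
  //   ... call into the runtime ...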
  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


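// Deferred code is emitted out of line, after the main instruction stream.
// Subclasses implement Generate(), register themselves with the code
// generator on construction, and jump back via exit().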
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS64_LITHIUM_CODEGEN_MIPS64_H_