OLD | NEW |
| (Empty) |
1 // Copyright 2013 the V8 project authors. All rights reserved. | |
2 // Redistribution and use in source and binary forms, with or without | |
3 // modification, are permitted provided that the following conditions are | |
4 // met: | |
5 // | |
6 // * Redistributions of source code must retain the above copyright | |
7 // notice, this list of conditions and the following disclaimer. | |
8 // * Redistributions in binary form must reproduce the above | |
9 // copyright notice, this list of conditions and the following | |
10 // disclaimer in the documentation and/or other materials provided | |
11 // with the distribution. | |
12 // * Neither the name of Google Inc. nor the names of its | |
13 // contributors may be used to endorse or promote products derived | |
14 // from this software without specific prior written permission. | |
15 // | |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
27 | |
28 #ifndef V8_A64_LITHIUM_CODEGEN_A64_H_ | |
29 #define V8_A64_LITHIUM_CODEGEN_A64_H_ | |
30 | |
31 #include "a64/lithium-a64.h" | |
32 | |
33 #include "a64/lithium-gap-resolver-a64.h" | |
34 #include "deoptimizer.h" | |
35 #include "lithium-codegen.h" | |
36 #include "safepoint-table.h" | |
37 #include "scopes.h" | |
38 #include "v8utils.h" | |
39 | |
40 namespace v8 { | |
41 namespace internal { | |
42 | |
43 // Forward declarations. | |
44 class LDeferredCode; | |
45 class SafepointGenerator; | |
46 class BranchGenerator; | |
47 | |
// Lithium (Crankshaft low-level IR) code generator for the A64 back end.
// Walks the LChunk's instructions, emits machine code through the
// MacroAssembler, and records the safepoint and deoptimization metadata
// that the runtime needs (see safepoints_, deoptimizations_ below).
class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    // Seed the literals table with the inlined functions so their indices
    // are stable before any instruction is emitted.
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  // Maps a block id to the id of the block that will actually be emitted
  // for it (empty blocks are collapsed by the chunk).
  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  // True if a stack frame must be set up eagerly in the prologue: we need
  // spill slots, make non-deferred calls, are compiling a full function
  // (not a stub), or the frame is explicitly required.
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  // True if a frame is only needed for deferred code paths and can be
  // built lazily there.
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // The link register is saved as part of frame construction, so its state
  // follows frame_is_built_.
  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32I(LOperand* op);
  Operand ToOperand32U(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support. These bodies are emitted out-of-line and
  // reached from the fast path via the corresponding LDeferredCode entry.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  // Distinguishes signed vs. unsigned interpretation of a 32-bit value.
  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  Operand ToOperand32(LOperand* op, IntegerSignedness signedness);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains some code to avoid emitting a
  // branch on the next emitted basic block where we could just fall-through.
  // You shouldn't use that directly but rather consider one of the helper like
  // LCodeGen::EmitBranch, LCodeGen::EmitCompareAndBranch...
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  // Returns the index of |literal| in deoptimization_literals_, adding it
  // if not already present.
  int DefineDeoptimizationLiteral(Handle<Object> literal);
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
  // Emit a conditional deoptimization using the given branch type
  // (optionally testing |reg|/|bit|); the Deoptimize*/DeoptimizeIf*
  // helpers below are convenience wrappers for common conditions.
  void DeoptimizeBranch(
      LEnvironment* environment,
      BranchType branch_type, Register reg = NoReg, int bit = -1,
      Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LEnvironment* environment,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
  void DeoptimizeIfZero(Register rt, LEnvironment* environment);
  void DeoptimizeIfNotZero(Register rt, LEnvironment* environment);
  void DeoptimizeIfNegative(Register rt, LEnvironment* environment);
  void DeoptimizeIfSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfRoot(Register rt,
                        Heap::RootListIndex index,
                        LEnvironment* environment);
  void DeoptimizeIfNotRoot(Register rt,
                           Heap::RootListIndex index,
                           LEnvironment* environment);
  void DeoptimizeIfMinusZero(DoubleRegister input, LEnvironment* environment);
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);

  // Address computation helpers for keyed (indexed) element accesses.
  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int additional_index);
  void CalcKeyedArrayBaseRegister(Register base,
                                  Register elements,
                                  Register key,
                                  bool key_is_tagged,
                                  ElementsKind elements_kind);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);

  // Queues deferred (out-of-line) code for emission after the main body.
  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps. The Generate* methods return true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  // Convenience overload that looks the function up by id.
  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function.
  // If the function is already loaded into x1 by the caller, function_reg may
  // be set to x1. Otherwise, it must be NoReg, and CallKnownFunction will
  // automatically load it.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         Register function_reg = NoReg);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) V8_OVERRIDE;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;

  // Environments registered for deoptimization, in registration order.
  ZoneList<LEnvironment*> deoptimizations_;
  // Entries for the deopt jump table emitted by GenerateDeoptJumpTable().
  ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
  // Literals referenced by deoptimization data; indexed via
  // DefineDeoptimizationLiteral().
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  // Deferred code sections, emitted after the main body.
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;  // -1 until set by the OSR prologue.
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // NOTE(review): not referenced in this header; presumably tracks the last
  // recorded source position -- confirm against the .cc before relying on it.
  int old_position_;

  // RAII helper: pushes the safepoint registers (and doubles, depending on
  // |kind|) on construction and pops them on destruction, keeping
  // expected_safepoint_kind_ in sync so safepoint recording matches the
  // actual stack layout. Scopes must not be nested (the kind must be
  // kSimple on entry).
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegisters();
          codegen_->masm_->PushSafepointFPRegisters();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          // Pop in reverse order of the pushes in the constructor.
          codegen_->masm_->PopSafepointFPRegisters();
          codegen_->masm_->PopSafepointRegisters();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};
418 | |
419 | |
// Base class for out-of-line (deferred) code sequences. Subclasses implement
// Generate() to emit the slow path; construction registers the object with
// the code generator (AddDeferredCode), which emits all deferred sections
// after the main instruction stream. The fast path branches to entry() and
// the deferred code jumps back via exit() -- either the internal label or an
// external one installed with SetExit().
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        // Remember which instruction this deferred code belongs to.
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  // Emits the deferred code sequence.
  virtual void Generate() = 0;
  // The instruction this deferred code was created for.
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  // The external exit label takes precedence over the internal one.
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
449 | |
450 | |
// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds and EmitInverted() emits
// the branch when the inverted condition is verified.
//
// For actual examples of conditions, see the concrete implementations in
// lithium-codegen-a64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
      : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  // Branch to |label| when the condition holds.
  virtual void Emit(Label* label) const = 0;
  // Branch to |label| when the condition does NOT hold.
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
473 | |
474 } } // namespace v8::internal | |
475 | |
476 #endif // V8_A64_LITHIUM_CODEGEN_A64_H_ | |
OLD | NEW |